View Javadoc
1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: Client.proto
3   
4   package org.apache.hadoop.hbase.protobuf.generated;
5   
6   public final class ClientProtos {
  // Utility holder class; never instantiated.
  private ClientProtos() {}
  /**
   * Registers all proto2 extensions defined in Client.proto with the given
   * registry. This file declares no extensions, so the method is a no-op,
   * but it is part of the standard generated-code surface.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
11    /**
12     * Protobuf enum {@code Consistency}
13     *
14     * <pre>
15     **
16     * Consistency defines the expected consistency level for an operation.
17     * </pre>
18     */
19    public enum Consistency
20        implements com.google.protobuf.ProtocolMessageEnum {
21      /**
22       * <code>STRONG = 0;</code>
23       */
24      STRONG(0, 0),
25      /**
26       * <code>TIMELINE = 1;</code>
27       */
28      TIMELINE(1, 1),
29      ;
30  
31      /**
32       * <code>STRONG = 0;</code>
33       */
34      public static final int STRONG_VALUE = 0;
35      /**
36       * <code>TIMELINE = 1;</code>
37       */
38      public static final int TIMELINE_VALUE = 1;
39  
40  
41      public final int getNumber() { return value; }
42  
43      public static Consistency valueOf(int value) {
44        switch (value) {
45          case 0: return STRONG;
46          case 1: return TIMELINE;
47          default: return null;
48        }
49      }
50  
51      public static com.google.protobuf.Internal.EnumLiteMap<Consistency>
52          internalGetValueMap() {
53        return internalValueMap;
54      }
55      private static com.google.protobuf.Internal.EnumLiteMap<Consistency>
56          internalValueMap =
57            new com.google.protobuf.Internal.EnumLiteMap<Consistency>() {
58              public Consistency findValueByNumber(int number) {
59                return Consistency.valueOf(number);
60              }
61            };
62  
63      public final com.google.protobuf.Descriptors.EnumValueDescriptor
64          getValueDescriptor() {
65        return getDescriptor().getValues().get(index);
66      }
67      public final com.google.protobuf.Descriptors.EnumDescriptor
68          getDescriptorForType() {
69        return getDescriptor();
70      }
71      public static final com.google.protobuf.Descriptors.EnumDescriptor
72          getDescriptor() {
73        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getEnumTypes().get(0);
74      }
75  
76      private static final Consistency[] VALUES = values();
77  
78      public static Consistency valueOf(
79          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
80        if (desc.getType() != getDescriptor()) {
81          throw new java.lang.IllegalArgumentException(
82            "EnumValueDescriptor is not for this type.");
83        }
84        return VALUES[desc.getIndex()];
85      }
86  
87      private final int index;
88      private final int value;
89  
90      private Consistency(int index, int value) {
91        this.index = index;
92        this.value = value;
93      }
94  
95      // @@protoc_insertion_point(enum_scope:Consistency)
96    }
97  
  /**
   * Read-side accessor interface for the {@code Authorizations} message,
   * implemented by both the immutable message and its builder.
   */
  public interface AuthorizationsOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated string label = 1;
    /**
     * <code>repeated string label = 1;</code>
     */
    java.util.List<java.lang.String>
    getLabelList();
    /**
     * <code>repeated string label = 1;</code>
     */
    int getLabelCount();
    /**
     * <code>repeated string label = 1;</code>
     */
    java.lang.String getLabel(int index);
    /**
     * <code>repeated string label = 1;</code>
     */
    com.google.protobuf.ByteString
        getLabelBytes(int index);
  }
  /**
   * Protobuf type {@code Authorizations}
   *
   * <pre>
   **
   * The protocol buffer version of Authorizations.
   * </pre>
   */
  public static final class Authorizations extends
      com.google.protobuf.GeneratedMessage
      implements AuthorizationsOrBuilder {
    // Use Authorizations.newBuilder() to construct.
    private Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only to create the singleton default instance below.
    private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton empty message; assigned in the static block at the bottom of
    // this class.
    private static final Authorizations defaultInstance;
    public static Authorizations getDefaultInstance() {
      return defaultInstance;
    }

    public Authorizations getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until end of message,
    // collecting unrecognized fields into unknownFields.
    private Authorizations(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of the message/stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (label), wire type 2: length-delimited string.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                // First value seen: replace the shared EMPTY list with a
                // mutable one.
                label_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              label_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field before publishing this (immutable) message.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
    }

    // Parser that delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<Authorizations> PARSER =
        new com.google.protobuf.AbstractParser<Authorizations>() {
      public Authorizations parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Authorizations(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Authorizations> getParserForType() {
      return PARSER;
    }

    // repeated string label = 1;
    public static final int LABEL_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList label_;
    /**
     * <code>repeated string label = 1;</code>
     */
    public java.util.List<java.lang.String>
        getLabelList() {
      return label_;
    }
    /**
     * <code>repeated string label = 1;</code>
     */
    public int getLabelCount() {
      return label_.size();
    }
    /**
     * <code>repeated string label = 1;</code>
     */
    public java.lang.String getLabel(int index) {
      return label_.get(index);
    }
    /**
     * <code>repeated string label = 1;</code>
     */
    public com.google.protobuf.ByteString
        getLabelBytes(int index) {
      return label_.getByteString(index);
    }

    private void initFields() {
      label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensure the size is memoized before serializing.
      getSerializedSize();
      for (int i = 0; i < label_.size(); i++) {
        output.writeBytes(1, label_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < label_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(label_.getByteString(i));
        }
        size += dataSize;
        // One byte of tag overhead per repeated element (field number 1).
        size += 1 * getLabelList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) obj;

      boolean result = true;
      result = result && getLabelList()
          .equals(other.getLabelList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hashCode; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getLabelCount() > 0) {
        hash = (37 * hash) + LABEL_FIELD_NUMBER;
        hash = (53 * hash) + getLabelList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code Authorizations}
     *
     * <pre>
     **
     * The protocol buffer version of Authorizations.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.AuthorizationsOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields, so nothing needs eager initialization.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          // Hand the list to the message as an unmodifiable view and drop the
          // "mutable" bit so further builder edits copy-on-write.
          label_ = new com.google.protobuf.UnmodifiableLazyStringList(
              label_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.label_ = label_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this;
        if (!other.label_.isEmpty()) {
          if (label_.isEmpty()) {
            // Share the other message's (immutable) list until first edit.
            label_ = other.label_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureLabelIsMutable();
            label_.addAll(other.label_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated string label = 1;
      private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write: replace a shared/immutable list with a private mutable
      // copy before the first modification.
      private void ensureLabelIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          label_ = new com.google.protobuf.LazyStringArrayList(label_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public java.util.List<java.lang.String>
          getLabelList() {
        return java.util.Collections.unmodifiableList(label_);
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public int getLabelCount() {
        return label_.size();
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public java.lang.String getLabel(int index) {
        return label_.get(index);
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public com.google.protobuf.ByteString
          getLabelBytes(int index) {
        return label_.getByteString(index);
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder setLabel(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureLabelIsMutable();
        label_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder addLabel(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureLabelIsMutable();
        label_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder addAllLabel(
          java.lang.Iterable<java.lang.String> values) {
        ensureLabelIsMutable();
        super.addAll(values, label_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder clearLabel() {
        label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder addLabelBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureLabelIsMutable();
        label_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:Authorizations)
    }

    static {
      defaultInstance = new Authorizations(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:Authorizations)
  }
639 
  /**
   * Read-side accessor interface for the {@code CellVisibility} message,
   * implemented by both the immutable message and its builder.
   */
  public interface CellVisibilityOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string expression = 1;
    /**
     * <code>required string expression = 1;</code>
     */
    boolean hasExpression();
    /**
     * <code>required string expression = 1;</code>
     */
    java.lang.String getExpression();
    /**
     * <code>required string expression = 1;</code>
     */
    com.google.protobuf.ByteString
        getExpressionBytes();
  }
658   /**
659    * Protobuf type {@code CellVisibility}
660    *
661    * <pre>
662    **
663    * The protocol buffer version of CellVisibility.
664    * </pre>
665    */
666   public static final class CellVisibility extends
667       com.google.protobuf.GeneratedMessage
668       implements CellVisibilityOrBuilder {
    // Use CellVisibility.newBuilder() to construct.
    private CellVisibility(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only to create the singleton default instance.
    private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton empty message; assigned in this class's static initializer
    // (beyond the visible portion of this file).
    private static final CellVisibility defaultInstance;
    public static CellVisibility getDefaultInstance() {
      return defaultInstance;
    }

    public CellVisibility getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until end of message,
    // collecting unrecognized fields into unknownFields.
    private CellVisibility(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of the message/stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (expression), wire type 2: length-delimited bytes;
              // stored as ByteString and lazily decoded by getExpression().
              bitField0_ |= 0x00000001;
              expression_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
    }

    // Parser that delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<CellVisibility> PARSER =
        new com.google.protobuf.AbstractParser<CellVisibility>() {
      public CellVisibility parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CellVisibility(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CellVisibility> getParserForType() {
      return PARSER;
    }
757 
    // Presence bits for optional/required fields; bit 0 = expression.
    private int bitField0_;
    // required string expression = 1;
    public static final int EXPRESSION_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString; conversions between the
    // two representations are cached in place.
    private java.lang.Object expression_;
    /**
     * <code>required string expression = 1;</code>
     */
    public boolean hasExpression() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string expression = 1;</code>
     */
    public java.lang.String getExpression() {
      java.lang.Object ref = expression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes were valid UTF-8, so
        // re-serialization of malformed input stays byte-exact.
        if (bs.isValidUtf8()) {
          expression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string expression = 1;</code>
     */
    public com.google.protobuf.ByteString
        getExpressionBytes() {
      java.lang.Object ref = expression_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form for subsequent serializations.
        expression_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
801 
802     private void initFields() {
803       expression_ = "";
804     }
805     private byte memoizedIsInitialized = -1;
806     public final boolean isInitialized() {
807       byte isInitialized = memoizedIsInitialized;
808       if (isInitialized != -1) return isInitialized == 1;
809 
810       if (!hasExpression()) {
811         memoizedIsInitialized = 0;
812         return false;
813       }
814       memoizedIsInitialized = 1;
815       return true;
816     }
817 
818     public void writeTo(com.google.protobuf.CodedOutputStream output)
819                         throws java.io.IOException {
820       getSerializedSize();
821       if (((bitField0_ & 0x00000001) == 0x00000001)) {
822         output.writeBytes(1, getExpressionBytes());
823       }
824       getUnknownFields().writeTo(output);
825     }
826 
    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and returns the serialized size of this message in
     * bytes, including tags and the unknown-field set.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      // Field 1 (expression) contributes only when present.
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getExpressionBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
841 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage, which replaces
    // the instance with a compact serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
848 
849     @java.lang.Override
850     public boolean equals(final java.lang.Object obj) {
851       if (obj == this) {
852        return true;
853       }
854       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)) {
855         return super.equals(obj);
856       }
857       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) obj;
858 
859       boolean result = true;
860       result = result && (hasExpression() == other.hasExpression());
861       if (hasExpression()) {
862         result = result && getExpression()
863             .equals(other.getExpression());
864       }
865       result = result &&
866           getUnknownFields().equals(other.getUnknownFields());
867       return result;
868     }
869 
    // Memoized hash; 0 doubles as "not yet computed" (a real 0 hash is
    // simply recomputed each call, which is harmless).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes the descriptor, the present
     * 'expression' field and the unknown-field set with the standard
     * protoc-generated multipliers.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasExpression()) {
        hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getExpression().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
886 
    // ---------------------------------------------------------------------
    // Static parse entry points. All overloads delegate to PARSER; the
    // ByteString/byte[] forms throw InvalidProtocolBufferException on
    // malformed input, the stream forms additionally surface IOExceptions.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited forms read a varint length prefix before the message body,
    // allowing several messages to share one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
939 
    // Builder factories: fresh builder, builder seeded from a prototype,
    // and a builder pre-populated from this instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
946 
947     @java.lang.Override
948     protected Builder newBuilderForType(
949         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
950       Builder builder = new Builder(parent);
951       return builder;
952     }
    /**
     * Protobuf type {@code CellVisibility}
     *
     * <pre>
     **
     * The protocol buffer version of CellVisibility.
     * </pre>
     *
     * Mutable builder for {@link CellVisibility}. Field presence is tracked
     * in {@code bitField0_} (bit 0 = expression). Not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibilityOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message/group fields in this type, so there are no nested field
      // builders to eagerly initialize; the hook is kept for uniformity.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets the builder: clears 'expression' and its has-bit. */
      public Builder clear() {
        super.clear();
        expression_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      /** Deep copy via build-partial + merge of the current state. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException if the
       * required 'expression' field was never set.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the required-field check, copying the builder's
       * has-bits into the message's bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.expression_ = expression_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic dispatch: fast path for the concrete type, reflective merge
      // otherwise (field-by-field via descriptors).
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merge: 'other' values overwrite ours field-by-field when present. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this;
        if (other.hasExpression()) {
          bitField0_ |= 0x00000001;
          // Copy the raw Object (String or ByteString) to avoid forcing a
          // UTF-8 conversion here.
          expression_ = other.expression_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasExpression()) {
          
          return false;
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On a parse error
       * any partially-parsed message is still merged (see finally) before
       * the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string expression = 1;
      // Stored as Object: either a String or its cached ByteString encoding.
      private java.lang.Object expression_ = "";
      /**
       * <code>required string expression = 1;</code>
       */
      public boolean hasExpression() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string expression = 1;</code>
       *
       * Decodes and caches the String form if the field is held as bytes.
       */
      public java.lang.String getExpression() {
        java.lang.Object ref = expression_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          expression_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string expression = 1;</code>
       *
       * Encodes and caches the ByteString form if the field is held as a String.
       */
      public com.google.protobuf.ByteString
          getExpressionBytes() {
        java.lang.Object ref = expression_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          expression_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string expression = 1;</code>
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setExpression(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        expression_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string expression = 1;</code>
       *
       * Clears the has-bit and restores the default ("").
       */
      public Builder clearExpression() {
        bitField0_ = (bitField0_ & ~0x00000001);
        expression_ = getDefaultInstance().getExpression();
        onChanged();
        return this;
      }
      /**
       * <code>required string expression = 1;</code>
       *
       * Sets the field from raw bytes; no UTF-8 validation is performed here.
       */
      public Builder setExpressionBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        expression_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:CellVisibility)
    }
1158 
    // Eagerly create the shared immutable default instance.
    static {
      defaultInstance = new CellVisibility(true);
      defaultInstance.initFields();
    }
1163 
1164     // @@protoc_insertion_point(class_scope:CellVisibility)
1165   }
1166 
  /**
   * Read-only view shared by {@code Column} and {@code Column.Builder}:
   * accessors for the required 'family' bytes field and the repeated
   * 'qualifier' bytes field.
   */
  public interface ColumnOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes family = 1;
    /**
     * <code>required bytes family = 1;</code>
     */
    boolean hasFamily();
    /**
     * <code>required bytes family = 1;</code>
     */
    com.google.protobuf.ByteString getFamily();

    // repeated bytes qualifier = 2;
    /**
     * <code>repeated bytes qualifier = 2;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getQualifierList();
    /**
     * <code>repeated bytes qualifier = 2;</code>
     */
    int getQualifierCount();
    /**
     * <code>repeated bytes qualifier = 2;</code>
     */
    com.google.protobuf.ByteString getQualifier(int index);
  }
1194   /**
1195    * Protobuf type {@code Column}
1196    *
1197    * <pre>
1198    **
1199    * Container for a list of column qualifier names of a family.
1200    * </pre>
1201    */
1202   public static final class Column extends
1203       com.google.protobuf.GeneratedMessage
1204       implements ColumnOrBuilder {
    // Use Column.newBuilder() to construct.
    // Builder-based constructor: adopts the builder's unknown fields.
    private Column(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance (noInit = true skips field init here).
    private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1211 
    // Shared immutable default instance, assigned in the static initializer.
    private static final Column defaultInstance;
    public static Column getDefaultInstance() {
      return defaultInstance;
    }

    public Column getDefaultInstanceForType() {
      return defaultInstance;
    }
1220 
    // Fields present on the wire that this schema version does not know about.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from the
     * stream until tag 0 (end of input/message). Unrecognized tags are
     * routed to the unknown-field set. On any parse failure the partially
     * populated message is attached to the thrown exception.
     */
    private Column(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Bit 0x2 tracks whether the mutable qualifier list has been allocated.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: Java switch cases match by value, so the position of the
          // 'default' label before case 10/18 has no effect on dispatch.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1, wire type 2: family (bytes)
              bitField0_ |= 0x00000001;
              family_ = input.readBytes();
              break;
            }
            case 18: {  // field 2, wire type 2: qualifier (repeated bytes)
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000002;
              }
              qualifier_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated field and unknown fields even on failure so the
        // unfinished message handed to callers is immutable.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for the {@code Column} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor;
    }

    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
    }
1289 
    // Stateless parser singleton; delegates to the parsing constructor.
    public static com.google.protobuf.Parser<Column> PARSER =
        new com.google.protobuf.AbstractParser<Column>() {
      public Column parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Column(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Column> getParserForType() {
      return PARSER;
    }
1304 
    // Presence bits for optional/required fields (bit 0 = family).
    private int bitField0_;
    // required bytes family = 1;
    public static final int FAMILY_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString family_;
    /**
     * <code>required bytes family = 1;</code>
     */
    public boolean hasFamily() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes family = 1;</code>
     */
    public com.google.protobuf.ByteString getFamily() {
      return family_;
    }
1321 
    // repeated bytes qualifier = 2;
    public static final int QUALIFIER_FIELD_NUMBER = 2;
    // Immutable after construction (unmodifiable list or emptyList()).
    private java.util.List<com.google.protobuf.ByteString> qualifier_;
    /**
     * <code>repeated bytes qualifier = 2;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getQualifierList() {
      return qualifier_;
    }
    /**
     * <code>repeated bytes qualifier = 2;</code>
     */
    public int getQualifierCount() {
      return qualifier_.size();
    }
    /**
     * <code>repeated bytes qualifier = 2;</code>
     */
    public com.google.protobuf.ByteString getQualifier(int index) {
      return qualifier_.get(index);
    }
1344 
    // Resets all fields to their proto defaults (empty bytes, empty list).
    private void initFields() {
      family_ = com.google.protobuf.ByteString.EMPTY;
      qualifier_ = java.util.Collections.emptyList();
    }
1349     private byte memoizedIsInitialized = -1;
1350     public final boolean isInitialized() {
1351       byte isInitialized = memoizedIsInitialized;
1352       if (isInitialized != -1) return isInitialized == 1;
1353 
1354       if (!hasFamily()) {
1355         memoizedIsInitialized = 0;
1356         return false;
1357       }
1358       memoizedIsInitialized = 1;
1359       return true;
1360     }
1361 
    /**
     * Serializes this message to {@code output} in protobuf wire format:
     * field 1 (family) when present, then each qualifier, then any unknown
     * fields carried over from parsing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure memoized size is computed first
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, family_);
      }
      for (int i = 0; i < qualifier_.size(); i++) {
        output.writeBytes(2, qualifier_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
1373 
    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and returns the serialized size in bytes: family,
     * every qualifier element plus a one-byte tag each, and unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, family_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < qualifier_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(qualifier_.get(i));
        }
        size += dataSize;
        // One byte of tag overhead per qualifier element (field 2, type 2).
        size += 1 * getQualifierList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1397 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage, which replaces
    // the instance with a compact serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1404 
1405     @java.lang.Override
1406     public boolean equals(final java.lang.Object obj) {
1407       if (obj == this) {
1408        return true;
1409       }
1410       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) {
1411         return super.equals(obj);
1412       }
1413       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj;
1414 
1415       boolean result = true;
1416       result = result && (hasFamily() == other.hasFamily());
1417       if (hasFamily()) {
1418         result = result && getFamily()
1419             .equals(other.getFamily());
1420       }
1421       result = result && getQualifierList()
1422           .equals(other.getQualifierList());
1423       result = result &&
1424           getUnknownFields().equals(other.getUnknownFields());
1425       return result;
1426     }
1427 
    // Memoized hash; 0 doubles as "not yet computed".
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes the descriptor, the present
     * 'family' field, a non-empty qualifier list, and the unknown fields
     * with the standard protoc-generated multipliers.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFamily()) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamily().hashCode();
      }
      if (getQualifierCount() > 0) {
        hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getQualifierList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1448 
    // ---------------------------------------------------------------------
    // Static parse entry points. All overloads delegate to PARSER; the
    // ByteString/byte[] forms throw InvalidProtocolBufferException on
    // malformed input, the stream forms additionally surface IOExceptions.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited forms read a varint length prefix before the message body,
    // allowing several messages to share one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1501 
    // Builder factories: fresh builder, builder seeded from a prototype,
    // and a builder pre-populated from this instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
1508 
1509     @java.lang.Override
1510     protected Builder newBuilderForType(
1511         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1512       Builder builder = new Builder(parent);
1513       return builder;
1514     }
1515     /**
1516      * Protobuf type {@code Column}
1517      *
1518      * <pre>
1519      **
1520      * Container for a list of column qualifier names of a family.
1521      * </pre>
1522      */
1523     public static final class Builder extends
1524         com.google.protobuf.GeneratedMessage.Builder<Builder>
1525        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder {
      /** Descriptor for the {@code Column} message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor;
      }

      // Reflection support: maps descriptor fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
      }
1537 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-attached builder: change notifications propagate upward.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message/group fields in this type, so there are no nested field
      // builders to eagerly initialize; the hook is kept for uniformity.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
1555 
      /** Resets the builder: clears 'family', the qualifier list, and both presence bits. */
      public Builder clear() {
        super.clear();
        family_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        qualifier_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
1564 
      /** Deep copy via build-partial + merge of the current state. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance();
      }
1577 
      /**
       * Builds the message, throwing UninitializedMessageException if the
       * required 'family' field was never set.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the required-field check. The qualifier list is
       * frozen (made unmodifiable) and handed to the message; the builder's
       * list bit is cleared so a later mutation reallocates a fresh list.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.family_ = family_;
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.qualifier_ = qualifier_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
1603 
1604       public Builder mergeFrom(com.google.protobuf.Message other) {
1605         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) {
1606           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other);
1607         } else {
1608           super.mergeFrom(other);
1609           return this;
1610         }
1611       }
1612 
      // Merges every set field of `other` into this builder: the scalar
      // `family` overwrites, the repeated `qualifier` concatenates.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this;
        if (other.hasFamily()) {
          setFamily(other.getFamily());
        }
        if (!other.qualifier_.isEmpty()) {
          if (qualifier_.isEmpty()) {
            // Nothing local yet: adopt the other message's list directly and
            // clear the "privately mutable" bit; a later mutation will take a
            // copy first (see ensureQualifierIsMutable).
            qualifier_ = other.qualifier_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureQualifierIsMutable();
            qualifier_.addAll(other.qualifier_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
1631 
1632       public final boolean isInitialized() {
1633         if (!hasFamily()) {
1634           
1635           return false;
1636         }
1637         return true;
1638       }
1639 
      // Parses a Column from the stream and merges it into this builder.
      // On a parse failure the partially parsed message (if any) is still
      // merged in the finally block before the exception propagates, so no
      // successfully read data is lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // The exception carries whatever was parsed before the failure.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0x1 = `family` has been set; bit 0x2 = `qualifier_` is this
      // builder's private mutable copy (see ensureQualifierIsMutable).
      private int bitField0_;

      // required bytes family = 1;
      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family = 1;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }
1673       /**
1674        * <code>required bytes family = 1;</code>
1675        */
1676       public Builder setFamily(com.google.protobuf.ByteString value) {
1677         if (value == null) {
1678     throw new NullPointerException();
1679   }
1680   bitField0_ |= 0x00000001;
1681         family_ = value;
1682         onChanged();
1683         return this;
1684       }
1685       /**
1686        * <code>required bytes family = 1;</code>
1687        */
1688       public Builder clearFamily() {
1689         bitField0_ = (bitField0_ & ~0x00000001);
1690         family_ = getDefaultInstance().getFamily();
1691         onChanged();
1692         return this;
1693       }
1694 
      // repeated bytes qualifier = 2;
      private java.util.List<com.google.protobuf.ByteString> qualifier_ = java.util.Collections.emptyList();
      // Copy-on-write guard: the list may be shared with another message
      // (see mergeFrom) or frozen (see buildPartial). Before the first
      // mutation, take a private ArrayList copy and record that via bit 0x2.
      private void ensureQualifierIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifier_);
          bitField0_ |= 0x00000002;
         }
      }
      /**
       * <code>repeated bytes qualifier = 2;</code>
       *
       * Returns an unmodifiable view of the current qualifiers; callers must
       * not attempt to mutate it.
       */
      public java.util.List<com.google.protobuf.ByteString>
          getQualifierList() {
        return java.util.Collections.unmodifiableList(qualifier_);
      }
      /**
       * <code>repeated bytes qualifier = 2;</code>
       */
      public int getQualifierCount() {
        return qualifier_.size();
      }
      /**
       * <code>repeated bytes qualifier = 2;</code>
       */
      public com.google.protobuf.ByteString getQualifier(int index) {
        return qualifier_.get(index);
      }
1722       /**
1723        * <code>repeated bytes qualifier = 2;</code>
1724        */
1725       public Builder setQualifier(
1726           int index, com.google.protobuf.ByteString value) {
1727         if (value == null) {
1728     throw new NullPointerException();
1729   }
1730   ensureQualifierIsMutable();
1731         qualifier_.set(index, value);
1732         onChanged();
1733         return this;
1734       }
1735       /**
1736        * <code>repeated bytes qualifier = 2;</code>
1737        */
1738       public Builder addQualifier(com.google.protobuf.ByteString value) {
1739         if (value == null) {
1740     throw new NullPointerException();
1741   }
1742   ensureQualifierIsMutable();
1743         qualifier_.add(value);
1744         onChanged();
1745         return this;
1746       }
      /**
       * <code>repeated bytes qualifier = 2;</code>
       *
       * Appends every element of {@code values} via the inherited addAll
       * helper from GeneratedMessage.Builder.
       */
      public Builder addAllQualifier(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureQualifierIsMutable();
        super.addAll(values, qualifier_);
        onChanged();
        return this;
      }
1757       /**
1758        * <code>repeated bytes qualifier = 2;</code>
1759        */
1760       public Builder clearQualifier() {
1761         qualifier_ = java.util.Collections.emptyList();
1762         bitField0_ = (bitField0_ & ~0x00000002);
1763         onChanged();
1764         return this;
1765       }
1766 
1767       // @@protoc_insertion_point(builder_scope:Column)
1768     }
1769 
    static {
      // Create the shared default instance through the no-init constructor,
      // then populate its fields with their proto defaults.
      defaultInstance = new Column(true);
      defaultInstance.initFields();
    }
1774 
1775     // @@protoc_insertion_point(class_scope:Column)
1776   }
1777 
  /**
   * Accessor interface for the {@code Get} message: one has/get pair per
   * singular proto field, plus list/count/index accessors for each repeated
   * field.
   */
  public interface GetOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes row = 1;
    /**
     * <code>required bytes row = 1;</code>
     */
    boolean hasRow();
    /**
     * <code>required bytes row = 1;</code>
     */
    com.google.protobuf.ByteString getRow();

    // repeated .Column column = 2;
    /**
     * <code>repeated .Column column = 2;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> 
        getColumnList();
    /**
     * <code>repeated .Column column = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
    /**
     * <code>repeated .Column column = 2;</code>
     */
    int getColumnCount();
    /**
     * <code>repeated .Column column = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
        getColumnOrBuilderList();
    /**
     * <code>repeated .Column column = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
        int index);

    // repeated .NameBytesPair attribute = 3;
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> 
        getAttributeList();
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    int getAttributeCount();
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
        getAttributeOrBuilderList();
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index);

    // optional .Filter filter = 4;
    /**
     * <code>optional .Filter filter = 4;</code>
     */
    boolean hasFilter();
    /**
     * <code>optional .Filter filter = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
    /**
     * <code>optional .Filter filter = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();

    // optional .TimeRange time_range = 5;
    /**
     * <code>optional .TimeRange time_range = 5;</code>
     */
    boolean hasTimeRange();
    /**
     * <code>optional .TimeRange time_range = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
    /**
     * <code>optional .TimeRange time_range = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();

    // optional uint32 max_versions = 6 [default = 1];
    /**
     * <code>optional uint32 max_versions = 6 [default = 1];</code>
     */
    boolean hasMaxVersions();
    /**
     * <code>optional uint32 max_versions = 6 [default = 1];</code>
     */
    int getMaxVersions();

    // optional bool cache_blocks = 7 [default = true];
    /**
     * <code>optional bool cache_blocks = 7 [default = true];</code>
     */
    boolean hasCacheBlocks();
    /**
     * <code>optional bool cache_blocks = 7 [default = true];</code>
     */
    boolean getCacheBlocks();

    // optional uint32 store_limit = 8;
    /**
     * <code>optional uint32 store_limit = 8;</code>
     */
    boolean hasStoreLimit();
    /**
     * <code>optional uint32 store_limit = 8;</code>
     */
    int getStoreLimit();

    // optional uint32 store_offset = 9;
    /**
     * <code>optional uint32 store_offset = 9;</code>
     */
    boolean hasStoreOffset();
    /**
     * <code>optional uint32 store_offset = 9;</code>
     */
    int getStoreOffset();

    // optional bool existence_only = 10 [default = false];
    /**
     * <code>optional bool existence_only = 10 [default = false];</code>
     *
     * <pre>
     * The result isn't asked for, just check for
     * the existence.
     * </pre>
     */
    boolean hasExistenceOnly();
    /**
     * <code>optional bool existence_only = 10 [default = false];</code>
     *
     * <pre>
     * The result isn't asked for, just check for
     * the existence.
     * </pre>
     */
    boolean getExistenceOnly();

    // optional bool closest_row_before = 11 [default = false];
    /**
     * <code>optional bool closest_row_before = 11 [default = false];</code>
     *
     * <pre>
     * If the row to get doesn't exist, return the
     * closest row before.
     * </pre>
     */
    boolean hasClosestRowBefore();
    /**
     * <code>optional bool closest_row_before = 11 [default = false];</code>
     *
     * <pre>
     * If the row to get doesn't exist, return the
     * closest row before.
     * </pre>
     */
    boolean getClosestRowBefore();

    // optional .Consistency consistency = 12 [default = STRONG];
    /**
     * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
     */
    boolean hasConsistency();
    /**
     * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
  }
1959   /**
1960    * Protobuf type {@code Get}
1961    *
1962    * <pre>
1963    **
1964    * The protocol buffer version of Get.
1965    * Unless existence_only is specified, return all the requested data
1966    * for the row that matches exactly, or the one that immediately
1967    * precedes it if closest_row_before is specified.
1968    * </pre>
1969    */
1970   public static final class Get extends
1971       com.google.protobuf.GeneratedMessage
1972       implements GetOrBuilder {
    // Use Get.newBuilder() to construct.
    private Get(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used for the shared default instance; fields are
    // populated afterwards via initFields().
    private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Get defaultInstance;
    public static Get getDefaultInstance() {
      return defaultInstance;
    }

    public Get getDefaultInstanceForType() {
      return defaultInstance;
    }
1988 
    // Fields that were present on the wire but not in this message's schema;
    // preserved so they round-trip on reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Stream-parsing constructor: reads tag/value pairs until end of input
    // (tag 0). Invoked only through PARSER.parsePartialFrom.
    private Get(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Tracks which repeated-field lists have been lazily allocated
      // (0x2 = column_, 0x4 = attribute_); distinct from this.bitField0_.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: `default` appearing before `case 10` is unusual but legal;
          // case order in a Java switch does not affect dispatch.
          switch (tag) {
            case 0:
              // Tag 0 means end of the message.
              done = true;
              break;
            default: {
              // Unrecognized tag: stash it in unknownFields, or stop if it
              // cannot be skipped.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              row_ = input.readBytes();
              break;
            }
            case 18: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
                mutable_bitField0_ |= 0x00000002;
              }
              column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
              break;
            }
            case 26: {
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
                mutable_bitField0_ |= 0x00000004;
              }
              attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
              break;
            }
            case 34: {
              // If filter was already seen, merge the new occurrence into it
              // (last-field-wins semantics for scalar subfields).
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = filter_.toBuilder();
              }
              filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(filter_);
                filter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 42: {
              // Same merge-on-repeat handling for time_range.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = timeRange_.toBuilder();
              }
              timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timeRange_);
                timeRange_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 48: {
              bitField0_ |= 0x00000008;
              maxVersions_ = input.readUInt32();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000010;
              cacheBlocks_ = input.readBool();
              break;
            }
            case 64: {
              bitField0_ |= 0x00000020;
              storeLimit_ = input.readUInt32();
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              storeOffset_ = input.readUInt32();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000080;
              existenceOnly_ = input.readBool();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000100;
              closestRowBefore_ = input.readBool();
              break;
            }
            case 96: {
              // Enum values outside the known range are kept as unknown
              // varint fields instead of being dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(12, rawValue);
              } else {
                bitField0_ |= 0x00000200;
                consistency_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze any repeated-field lists that were allocated, even when an
        // exception is propagating, so the unfinished message is immutable.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          column_ = java.util.Collections.unmodifiableList(column_);
        }
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          attribute_ = java.util.Collections.unmodifiableList(attribute_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor of the Get message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
    }
2135 
2136     public static com.google.protobuf.Parser<Get> PARSER =
2137         new com.google.protobuf.AbstractParser<Get>() {
2138       public Get parsePartialFrom(
2139           com.google.protobuf.CodedInputStream input,
2140           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2141           throws com.google.protobuf.InvalidProtocolBufferException {
2142         return new Get(input, extensionRegistry);
2143       }
2144     };
2145 
2146     @java.lang.Override
2147     public com.google.protobuf.Parser<Get> getParserForType() {
2148       return PARSER;
2149     }
2150 
    // Presence bits for singular fields; bit assignments are noted on each
    // has-method below.
    private int bitField0_;
    // required bytes row = 1;
    public static final int ROW_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString row_;
    /**
     * <code>required bytes row = 1;</code>
     */
    public boolean hasRow() {
      // has-bit 0x00000001
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes row = 1;</code>
     */
    public com.google.protobuf.ByteString getRow() {
      return row_;
    }

    // repeated .Column column = 2;
    public static final int COLUMN_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
    /**
     * <code>repeated .Column column = 2;</code>
     *
     * Returns the backing list directly; the parsing constructor freezes it
     * as unmodifiable.
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
      return column_;
    }
    /**
     * <code>repeated .Column column = 2;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
        getColumnOrBuilderList() {
      return column_;
    }
    /**
     * <code>repeated .Column column = 2;</code>
     */
    public int getColumnCount() {
      return column_.size();
    }
    /**
     * <code>repeated .Column column = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
      return column_.get(index);
    }
    /**
     * <code>repeated .Column column = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
        int index) {
      return column_.get(index);
    }
2203 
    // repeated .NameBytesPair attribute = 3;
    public static final int ATTRIBUTE_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     *
     * Returns the backing list directly; the parsing constructor freezes it
     * as unmodifiable.
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
        getAttributeOrBuilderList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    public int getAttributeCount() {
      return attribute_.size();
    }
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
      return attribute_.get(index);
    }
    /**
     * <code>repeated .NameBytesPair attribute = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index) {
      return attribute_.get(index);
    }
2239 
    // optional .Filter filter = 4;
    public static final int FILTER_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
    /**
     * <code>optional .Filter filter = 4;</code>
     */
    public boolean hasFilter() {
      // has-bit 0x00000002
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .Filter filter = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
      return filter_;
    }
    /**
     * <code>optional .Filter filter = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
      return filter_;
    }

    // optional .TimeRange time_range = 5;
    public static final int TIME_RANGE_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
    /**
     * <code>optional .TimeRange time_range = 5;</code>
     */
    public boolean hasTimeRange() {
      // has-bit 0x00000004
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .TimeRange time_range = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
      return timeRange_;
    }
    /**
     * <code>optional .TimeRange time_range = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
      return timeRange_;
    }
2283 
    // optional uint32 max_versions = 6 [default = 1];
    public static final int MAX_VERSIONS_FIELD_NUMBER = 6;
    private int maxVersions_;
    /**
     * <code>optional uint32 max_versions = 6 [default = 1];</code>
     */
    public boolean hasMaxVersions() {
      // has-bit 0x00000008
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint32 max_versions = 6 [default = 1];</code>
     */
    public int getMaxVersions() {
      return maxVersions_;
    }

    // optional bool cache_blocks = 7 [default = true];
    public static final int CACHE_BLOCKS_FIELD_NUMBER = 7;
    private boolean cacheBlocks_;
    /**
     * <code>optional bool cache_blocks = 7 [default = true];</code>
     */
    public boolean hasCacheBlocks() {
      // has-bit 0x00000010
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bool cache_blocks = 7 [default = true];</code>
     */
    public boolean getCacheBlocks() {
      return cacheBlocks_;
    }

    // optional uint32 store_limit = 8;
    public static final int STORE_LIMIT_FIELD_NUMBER = 8;
    private int storeLimit_;
    /**
     * <code>optional uint32 store_limit = 8;</code>
     */
    public boolean hasStoreLimit() {
      // has-bit 0x00000020
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint32 store_limit = 8;</code>
     */
    public int getStoreLimit() {
      return storeLimit_;
    }
2331 
    // optional uint32 store_offset = 9;
    public static final int STORE_OFFSET_FIELD_NUMBER = 9;
    private int storeOffset_;
    /**
     * <code>optional uint32 store_offset = 9;</code>
     */
    public boolean hasStoreOffset() {
      // has-bit 0x00000040
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint32 store_offset = 9;</code>
     */
    public int getStoreOffset() {
      return storeOffset_;
    }

    // optional bool existence_only = 10 [default = false];
    public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10;
    private boolean existenceOnly_;
    /**
     * <code>optional bool existence_only = 10 [default = false];</code>
     *
     * <pre>
     * The result isn't asked for, just check for
     * the existence.
     * </pre>
     */
    public boolean hasExistenceOnly() {
      // has-bit 0x00000080
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional bool existence_only = 10 [default = false];</code>
     *
     * <pre>
     * The result isn't asked for, just check for
     * the existence.
     * </pre>
     */
    public boolean getExistenceOnly() {
      return existenceOnly_;
    }

    // optional bool closest_row_before = 11 [default = false];
    public static final int CLOSEST_ROW_BEFORE_FIELD_NUMBER = 11;
    private boolean closestRowBefore_;
    /**
     * <code>optional bool closest_row_before = 11 [default = false];</code>
     *
     * <pre>
     * If the row to get doesn't exist, return the
     * closest row before.
     * </pre>
     */
    public boolean hasClosestRowBefore() {
      // has-bit 0x00000100
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional bool closest_row_before = 11 [default = false];</code>
     *
     * <pre>
     * If the row to get doesn't exist, return the
     * closest row before.
     * </pre>
     */
    public boolean getClosestRowBefore() {
      return closestRowBefore_;
    }

    // optional .Consistency consistency = 12 [default = STRONG];
    public static final int CONSISTENCY_FIELD_NUMBER = 12;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
    /**
     * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
     */
    public boolean hasConsistency() {
      // has-bit 0x00000200
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
      return consistency_;
    }
2415 
    // Resets every field to its proto-declared default: max_versions = 1,
    // cache_blocks = true, consistency = STRONG, all others zero/false/empty.
    private void initFields() {
      row_ = com.google.protobuf.ByteString.EMPTY;
      column_ = java.util.Collections.emptyList();
      attribute_ = java.util.Collections.emptyList();
      filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      maxVersions_ = 1;
      cacheBlocks_ = true;
      storeLimit_ = 0;
      storeOffset_ = 0;
      existenceOnly_ = false;
      closestRowBefore_ = false;
      consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
    }
    // Memoized result of isInitialized(): -1 = not computed yet,
    // 0 = known-uninitialized, 1 = known-initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // `row` is the only required field of Get itself.
      if (!hasRow()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // Nested messages must also be initialized (Column requires family;
      // attribute and filter carry their own required fields).
      for (int i = 0; i < getColumnCount(); i++) {
        if (!getColumn(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAttributeCount(); i++) {
        if (!getAttribute(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasFilter()) {
        if (!getFilter().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
2460 
    /**
     * Serializes every present field to {@code output} in ascending
     * field-number order (1..12), followed by any unknown fields.
     * Presence of optional fields is checked via bitField0_.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures the memoized size is computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, row_);
      }
      for (int i = 0; i < column_.size(); i++) {
        output.writeMessage(2, column_.get(i));
      }
      for (int i = 0; i < attribute_.size(); i++) {
        output.writeMessage(3, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(4, filter_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(5, timeRange_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt32(6, maxVersions_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBool(7, cacheBlocks_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeUInt32(8, storeLimit_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt32(9, storeOffset_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeBool(10, existenceOnly_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeBool(11, closestRowBefore_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeEnum(12, consistency_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
2502 
    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and memoizes) the serialized byte size of this message:
     * the sum of the wire size of each present field plus the size of
     * any unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, row_);
      }
      for (int i = 0; i < column_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, column_.get(i));
      }
      for (int i = 0; i < attribute_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, filter_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, timeRange_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(6, maxVersions_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(7, cacheBlocks_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(8, storeLimit_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(9, storeOffset_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(10, existenceOnly_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(11, closestRowBefore_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(12, consistency_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2561 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegates to the superclass's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2568 
    /**
     * Field-by-field equality: two Gets are equal when the same fields are
     * present and every present field, plus the unknown-field set, compares
     * equal. Non-Get objects fall back to the superclass comparison.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj;

      boolean result = true;
      result = result && (hasRow() == other.hasRow());
      if (hasRow()) {
        result = result && getRow()
            .equals(other.getRow());
      }
      result = result && getColumnList()
          .equals(other.getColumnList());
      result = result && getAttributeList()
          .equals(other.getAttributeList());
      result = result && (hasFilter() == other.hasFilter());
      if (hasFilter()) {
        result = result && getFilter()
            .equals(other.getFilter());
      }
      result = result && (hasTimeRange() == other.hasTimeRange());
      if (hasTimeRange()) {
        result = result && getTimeRange()
            .equals(other.getTimeRange());
      }
      result = result && (hasMaxVersions() == other.hasMaxVersions());
      if (hasMaxVersions()) {
        result = result && (getMaxVersions()
            == other.getMaxVersions());
      }
      result = result && (hasCacheBlocks() == other.hasCacheBlocks());
      if (hasCacheBlocks()) {
        result = result && (getCacheBlocks()
            == other.getCacheBlocks());
      }
      result = result && (hasStoreLimit() == other.hasStoreLimit());
      if (hasStoreLimit()) {
        result = result && (getStoreLimit()
            == other.getStoreLimit());
      }
      result = result && (hasStoreOffset() == other.hasStoreOffset());
      if (hasStoreOffset()) {
        result = result && (getStoreOffset()
            == other.getStoreOffset());
      }
      result = result && (hasExistenceOnly() == other.hasExistenceOnly());
      if (hasExistenceOnly()) {
        result = result && (getExistenceOnly()
            == other.getExistenceOnly());
      }
      result = result && (hasClosestRowBefore() == other.hasClosestRowBefore());
      if (hasClosestRowBefore()) {
        result = result && (getClosestRowBefore()
            == other.getClosestRowBefore());
      }
      result = result && (hasConsistency() == other.hasConsistency());
      if (hasConsistency()) {
        result = result &&
            (getConsistency() == other.getConsistency());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
2638 
    // Cached hash code; 0 means not yet computed.
    private int memoizedHashCode = 0;
    /**
     * Hash over the message descriptor, each present field (mixed in with
     * its field number) and the unknown-field set; memoized after the
     * first call, consistent with equals().
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRow()) {
        hash = (37 * hash) + ROW_FIELD_NUMBER;
        hash = (53 * hash) + getRow().hashCode();
      }
      if (getColumnCount() > 0) {
        hash = (37 * hash) + COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getColumnList().hashCode();
      }
      if (getAttributeCount() > 0) {
        hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeList().hashCode();
      }
      if (hasFilter()) {
        hash = (37 * hash) + FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getFilter().hashCode();
      }
      if (hasTimeRange()) {
        hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
        hash = (53 * hash) + getTimeRange().hashCode();
      }
      if (hasMaxVersions()) {
        hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getMaxVersions();
      }
      if (hasCacheBlocks()) {
        hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCacheBlocks());
      }
      if (hasStoreLimit()) {
        hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + getStoreLimit();
      }
      if (hasStoreOffset()) {
        hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
        hash = (53 * hash) + getStoreOffset();
      }
      if (hasExistenceOnly()) {
        hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getExistenceOnly());
      }
      if (hasClosestRowBefore()) {
        hash = (37 * hash) + CLOSEST_ROW_BEFORE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getClosestRowBefore());
      }
      if (hasConsistency()) {
        hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getConsistency());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2699 
    // Static parsing entry points for the supported input kinds
    // (ByteString, byte[], InputStream, CodedInputStream, length-delimited
    // streams); all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2752 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new builder pre-populated with the prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2766     /**
2767      * Protobuf type {@code Get}
2768      *
2769      * <pre>
2770      **
2771      * The protocol buffer version of Get.
2772      * Unless existence_only is specified, return all the requested data
2773      * for the row that matches exactly, or the one that immediately
2774      * precedes it if closest_row_before is specified.
2775      * </pre>
2776      */
2777     public static final class Builder extends
2778         com.google.protobuf.GeneratedMessage.Builder<Builder>
2779        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor;
      }

      // Maps the Get descriptor's fields to the generated accessor methods.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builders when the
      // protobuf runtime is configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getColumnFieldBuilder();
          getAttributeFieldBuilder();
          getFilterFieldBuilder();
          getTimeRangeFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
2813 
      /**
       * Resets every field to its proto-declared default and clears all of
       * this builder's presence bits; nested field builders are cleared
       * in place when present.
       */
      public Builder clear() {
        super.clear();
        row_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (columnBuilder_ == null) {
          column_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          columnBuilder_.clear();
        }
        if (attributeBuilder_ == null) {
          attribute_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          attributeBuilder_.clear();
        }
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        if (timeRangeBuilder_ == null) {
          timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
        } else {
          timeRangeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        maxVersions_ = 1;
        bitField0_ = (bitField0_ & ~0x00000020);
        cacheBlocks_ = true;
        bitField0_ = (bitField0_ & ~0x00000040);
        storeLimit_ = 0;
        bitField0_ = (bitField0_ & ~0x00000080);
        storeOffset_ = 0;
        bitField0_ = (bitField0_ & ~0x00000100);
        existenceOnly_ = false;
        bitField0_ = (bitField0_ & ~0x00000200);
        closestRowBefore_ = false;
        bitField0_ = (bitField0_ & ~0x00000400);
        consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
        bitField0_ = (bitField0_ & ~0x00000800);
        return this;
      }

      public Builder clone() {
        // Copies the current builder state via a partial build.
        return create().mergeFrom(buildPartial());
      }
2862 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if any required field is unset or any
       * nested message is uninitialized (see isInitialized()).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
2879 
      /**
       * Builds the message without enforcing required fields, translating
       * this builder's presence bits (which use different bit positions
       * because repeated fields consume builder bits) into the message's
       * bitField0_ layout. Repeated lists are frozen as unmodifiable.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.row_ = row_;
        if (columnBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            column_ = java.util.Collections.unmodifiableList(column_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.column_ = column_;
        } else {
          result.column_ = columnBuilder_.build();
        }
        if (attributeBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            attribute_ = java.util.Collections.unmodifiableList(attribute_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.attribute_ = attribute_;
        } else {
          result.attribute_ = attributeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000002;
        }
        if (filterBuilder_ == null) {
          result.filter_ = filter_;
        } else {
          result.filter_ = filterBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000004;
        }
        if (timeRangeBuilder_ == null) {
          result.timeRange_ = timeRange_;
        } else {
          result.timeRange_ = timeRangeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000008;
        }
        result.maxVersions_ = maxVersions_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000010;
        }
        result.cacheBlocks_ = cacheBlocks_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000020;
        }
        result.storeLimit_ = storeLimit_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000040;
        }
        result.storeOffset_ = storeOffset_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000080;
        }
        result.existenceOnly_ = existenceOnly_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000100;
        }
        result.closestRowBefore_ = closestRowBefore_;
        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
          to_bitField0_ |= 0x00000200;
        }
        result.consistency_ = consistency_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
2954 
      public Builder mergeFrom(com.google.protobuf.Message other) {
        // Fast path for same-type merge; otherwise fall back to the
        // reflection-based merge in the superclass.
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges all set fields of {@code other} into this builder: repeated
       * fields are appended, singular fields are overwritten (messages are
       * merged recursively), and unknown fields are merged last.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this;
        if (other.hasRow()) {
          setRow(other.getRow());
        }
        if (columnBuilder_ == null) {
          if (!other.column_.isEmpty()) {
            if (column_.isEmpty()) {
              column_ = other.column_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureColumnIsMutable();
              column_.addAll(other.column_);
            }
            onChanged();
          }
        } else {
          if (!other.column_.isEmpty()) {
            if (columnBuilder_.isEmpty()) {
              columnBuilder_.dispose();
              columnBuilder_ = null;
              column_ = other.column_;
              bitField0_ = (bitField0_ & ~0x00000002);
              columnBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getColumnFieldBuilder() : null;
            } else {
              columnBuilder_.addAllMessages(other.column_);
            }
          }
        }
        if (attributeBuilder_ == null) {
          if (!other.attribute_.isEmpty()) {
            if (attribute_.isEmpty()) {
              attribute_ = other.attribute_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureAttributeIsMutable();
              attribute_.addAll(other.attribute_);
            }
            onChanged();
          }
        } else {
          if (!other.attribute_.isEmpty()) {
            if (attributeBuilder_.isEmpty()) {
              attributeBuilder_.dispose();
              attributeBuilder_ = null;
              attribute_ = other.attribute_;
              bitField0_ = (bitField0_ & ~0x00000004);
              attributeBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAttributeFieldBuilder() : null;
            } else {
              attributeBuilder_.addAllMessages(other.attribute_);
            }
          }
        }
        if (other.hasFilter()) {
          mergeFilter(other.getFilter());
        }
        if (other.hasTimeRange()) {
          mergeTimeRange(other.getTimeRange());
        }
        if (other.hasMaxVersions()) {
          setMaxVersions(other.getMaxVersions());
        }
        if (other.hasCacheBlocks()) {
          setCacheBlocks(other.getCacheBlocks());
        }
        if (other.hasStoreLimit()) {
          setStoreLimit(other.getStoreLimit());
        }
        if (other.hasStoreOffset()) {
          setStoreOffset(other.getStoreOffset());
        }
        if (other.hasExistenceOnly()) {
          setExistenceOnly(other.getExistenceOnly());
        }
        if (other.hasClosestRowBefore()) {
          setClosestRowBefore(other.getClosestRowBefore());
        }
        if (other.hasConsistency()) {
          setConsistency(other.getConsistency());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
3051 
      // Mirrors Get.isInitialized(): requires the row field and that every
      // nested column, attribute and filter message is initialized.
      // (No memoization here, since builder state is mutable.)
      public final boolean isInitialized() {
        if (!hasRow()) {
          
          return false;
        }
        for (int i = 0; i < getColumnCount(); i++) {
          if (!getColumn(i).isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getAttributeCount(); i++) {
          if (!getAttribute(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasFilter()) {
          if (!getFilter().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
3077 
      /**
       * Parses a Get from the stream and merges it into this builder.
       * On parse failure the partially parsed message (if any) is still
       * merged before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // This builder's presence bits (numbered independently of Get's own
      // bitField0_; see buildPartial() for the translation).
      private int bitField0_;
3096 
      // required bytes row = 1;
      private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes row = 1;</code>
       */
      public boolean hasRow() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes row = 1;</code>
       */
      public com.google.protobuf.ByteString getRow() {
        return row_;
      }
      /**
       * <code>required bytes row = 1;</code>
       */
      public Builder setRow(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        row_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes row = 1;</code>
       */
      public Builder clearRow() {
        // Clears the presence bit and restores the default-instance value.
        bitField0_ = (bitField0_ & ~0x00000001);
        row_ = getDefaultInstance().getRow();
        onChanged();
        return this;
      }
3132 
      // repeated .Column column = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
        java.util.Collections.emptyList();
      // Copies the shared/immutable list into a fresh ArrayList the first
      // time it is mutated (bit 0x00000002 records mutability, not presence).
      private void ensureColumnIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
          bitField0_ |= 0x00000002;
         }
      }

      // Lazily created; when non-null it, rather than column_, owns the
      // repeated field's contents.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;

      /**
       * <code>repeated .Column column = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
        if (columnBuilder_ == null) {
          return java.util.Collections.unmodifiableList(column_);
        } else {
          return columnBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .Column column = 2;</code>
       */
      public int getColumnCount() {
        if (columnBuilder_ == null) {
          return column_.size();
        } else {
          return columnBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .Column column = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
        if (columnBuilder_ == null) {
          return column_.get(index);
        } else {
          return columnBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .Column column = 2;</code>
       */
      public Builder setColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
        if (columnBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureColumnIsMutable();
          column_.set(index, value);
          onChanged();
        } else {
          columnBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 2;</code>
       */
      public Builder setColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
        if (columnBuilder_ == null) {
          ensureColumnIsMutable();
          column_.set(index, builderForValue.build());
          onChanged();
        } else {
          columnBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 2;</code>
       */
      public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
        if (columnBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureColumnIsMutable();
          column_.add(value);
          onChanged();
        } else {
          columnBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 2;</code>
       */
      public Builder addColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
        if (columnBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureColumnIsMutable();
          column_.add(index, value);
          onChanged();
        } else {
          columnBuilder_.addMessage(index, value);
        }
        return this;
      }
3240       /**
3241        * <code>repeated .Column column = 2;</code>
3242        */
3243       public Builder addColumn(
3244           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3245         if (columnBuilder_ == null) {
3246           ensureColumnIsMutable();
3247           column_.add(builderForValue.build());
3248           onChanged();
3249         } else {
3250           columnBuilder_.addMessage(builderForValue.build());
3251         }
3252         return this;
3253       }
3254       /**
3255        * <code>repeated .Column column = 2;</code>
3256        */
3257       public Builder addColumn(
3258           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3259         if (columnBuilder_ == null) {
3260           ensureColumnIsMutable();
3261           column_.add(index, builderForValue.build());
3262           onChanged();
3263         } else {
3264           columnBuilder_.addMessage(index, builderForValue.build());
3265         }
3266         return this;
3267       }
3268       /**
3269        * <code>repeated .Column column = 2;</code>
3270        */
3271       public Builder addAllColumn(
3272           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
3273         if (columnBuilder_ == null) {
3274           ensureColumnIsMutable();
3275           super.addAll(values, column_);
3276           onChanged();
3277         } else {
3278           columnBuilder_.addAllMessages(values);
3279         }
3280         return this;
3281       }
3282       /**
3283        * <code>repeated .Column column = 2;</code>
3284        */
3285       public Builder clearColumn() {
3286         if (columnBuilder_ == null) {
3287           column_ = java.util.Collections.emptyList();
3288           bitField0_ = (bitField0_ & ~0x00000002);
3289           onChanged();
3290         } else {
3291           columnBuilder_.clear();
3292         }
3293         return this;
3294       }
3295       /**
3296        * <code>repeated .Column column = 2;</code>
3297        */
3298       public Builder removeColumn(int index) {
3299         if (columnBuilder_ == null) {
3300           ensureColumnIsMutable();
3301           column_.remove(index);
3302           onChanged();
3303         } else {
3304           columnBuilder_.remove(index);
3305         }
3306         return this;
3307       }
3308       /**
3309        * <code>repeated .Column column = 2;</code>
3310        */
3311       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
3312           int index) {
3313         return getColumnFieldBuilder().getBuilder(index);
3314       }
3315       /**
3316        * <code>repeated .Column column = 2;</code>
3317        */
3318       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
3319           int index) {
3320         if (columnBuilder_ == null) {
3321           return column_.get(index);  } else {
3322           return columnBuilder_.getMessageOrBuilder(index);
3323         }
3324       }
3325       /**
3326        * <code>repeated .Column column = 2;</code>
3327        */
3328       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
3329            getColumnOrBuilderList() {
3330         if (columnBuilder_ != null) {
3331           return columnBuilder_.getMessageOrBuilderList();
3332         } else {
3333           return java.util.Collections.unmodifiableList(column_);
3334         }
3335       }
3336       /**
3337        * <code>repeated .Column column = 2;</code>
3338        */
3339       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
3340         return getColumnFieldBuilder().addBuilder(
3341             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
3342       }
3343       /**
3344        * <code>repeated .Column column = 2;</code>
3345        */
3346       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
3347           int index) {
3348         return getColumnFieldBuilder().addBuilder(
3349             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
3350       }
3351       /**
3352        * <code>repeated .Column column = 2;</code>
3353        */
3354       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder> 
3355            getColumnBuilderList() {
3356         return getColumnFieldBuilder().getBuilderList();
3357       }
3358       private com.google.protobuf.RepeatedFieldBuilder<
3359           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
3360           getColumnFieldBuilder() {
3361         if (columnBuilder_ == null) {
3362           columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
3363               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
3364                   column_,
3365                   ((bitField0_ & 0x00000002) == 0x00000002),
3366                   getParentForChildren(),
3367                   isClean());
3368           column_ = null;
3369         }
3370         return columnBuilder_;
3371       }
3372 
      // NOTE(review): protoc-generated accessors for the repeated
      // `.NameBytesPair attribute = 3` field. Same dual list/RepeatedFieldBuilder
      // pattern as the `column` field above, keyed on bitField0_ bit 0x00000004.
      // Generated code — do not hand-edit.
3373       // repeated .NameBytesPair attribute = 3;
3374       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
3375         java.util.Collections.emptyList();
      // Copy-on-write: replaces the (possibly shared/immutable) list with a private
      // ArrayList before the first mutation, then marks the field mutable.
3376       private void ensureAttributeIsMutable() {
3377         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
3378           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
3379           bitField0_ |= 0x00000004;
3380          }
3381       }
3382 
3383       private com.google.protobuf.RepeatedFieldBuilder<
3384           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
3385 
3386       /**
3387        * <code>repeated .NameBytesPair attribute = 3;</code>
3388        */
3389       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
3390         if (attributeBuilder_ == null) {
3391           return java.util.Collections.unmodifiableList(attribute_);
3392         } else {
3393           return attributeBuilder_.getMessageList();
3394         }
3395       }
3396       /**
3397        * <code>repeated .NameBytesPair attribute = 3;</code>
3398        */
3399       public int getAttributeCount() {
3400         if (attributeBuilder_ == null) {
3401           return attribute_.size();
3402         } else {
3403           return attributeBuilder_.getCount();
3404         }
3405       }
3406       /**
3407        * <code>repeated .NameBytesPair attribute = 3;</code>
3408        */
3409       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
3410         if (attributeBuilder_ == null) {
3411           return attribute_.get(index);
3412         } else {
3413           return attributeBuilder_.getMessage(index);
3414         }
3415       }
3416       /**
3417        * <code>repeated .NameBytesPair attribute = 3;</code>
3418        */
3419       public Builder setAttribute(
3420           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3421         if (attributeBuilder_ == null) {
3422           if (value == null) {
3423             throw new NullPointerException();
3424           }
3425           ensureAttributeIsMutable();
3426           attribute_.set(index, value);
3427           onChanged();
3428         } else {
3429           attributeBuilder_.setMessage(index, value);
3430         }
3431         return this;
3432       }
3433       /**
3434        * <code>repeated .NameBytesPair attribute = 3;</code>
3435        */
3436       public Builder setAttribute(
3437           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3438         if (attributeBuilder_ == null) {
3439           ensureAttributeIsMutable();
3440           attribute_.set(index, builderForValue.build());
3441           onChanged();
3442         } else {
3443           attributeBuilder_.setMessage(index, builderForValue.build());
3444         }
3445         return this;
3446       }
3447       /**
3448        * <code>repeated .NameBytesPair attribute = 3;</code>
3449        */
3450       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3451         if (attributeBuilder_ == null) {
3452           if (value == null) {
3453             throw new NullPointerException();
3454           }
3455           ensureAttributeIsMutable();
3456           attribute_.add(value);
3457           onChanged();
3458         } else {
3459           attributeBuilder_.addMessage(value);
3460         }
3461         return this;
3462       }
3463       /**
3464        * <code>repeated .NameBytesPair attribute = 3;</code>
3465        */
3466       public Builder addAttribute(
3467           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3468         if (attributeBuilder_ == null) {
3469           if (value == null) {
3470             throw new NullPointerException();
3471           }
3472           ensureAttributeIsMutable();
3473           attribute_.add(index, value);
3474           onChanged();
3475         } else {
3476           attributeBuilder_.addMessage(index, value);
3477         }
3478         return this;
3479       }
3480       /**
3481        * <code>repeated .NameBytesPair attribute = 3;</code>
3482        */
3483       public Builder addAttribute(
3484           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3485         if (attributeBuilder_ == null) {
3486           ensureAttributeIsMutable();
3487           attribute_.add(builderForValue.build());
3488           onChanged();
3489         } else {
3490           attributeBuilder_.addMessage(builderForValue.build());
3491         }
3492         return this;
3493       }
3494       /**
3495        * <code>repeated .NameBytesPair attribute = 3;</code>
3496        */
3497       public Builder addAttribute(
3498           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3499         if (attributeBuilder_ == null) {
3500           ensureAttributeIsMutable();
3501           attribute_.add(index, builderForValue.build());
3502           onChanged();
3503         } else {
3504           attributeBuilder_.addMessage(index, builderForValue.build());
3505         }
3506         return this;
3507       }
3508       /**
3509        * <code>repeated .NameBytesPair attribute = 3;</code>
3510        */
3511       public Builder addAllAttribute(
3512           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
3513         if (attributeBuilder_ == null) {
3514           ensureAttributeIsMutable();
3515           super.addAll(values, attribute_);
3516           onChanged();
3517         } else {
3518           attributeBuilder_.addAllMessages(values);
3519         }
3520         return this;
3521       }
3522       /**
3523        * <code>repeated .NameBytesPair attribute = 3;</code>
3524        */
3525       public Builder clearAttribute() {
3526         if (attributeBuilder_ == null) {
3527           attribute_ = java.util.Collections.emptyList();
3528           bitField0_ = (bitField0_ & ~0x00000004);
3529           onChanged();
3530         } else {
3531           attributeBuilder_.clear();
3532         }
3533         return this;
3534       }
3535       /**
3536        * <code>repeated .NameBytesPair attribute = 3;</code>
3537        */
3538       public Builder removeAttribute(int index) {
3539         if (attributeBuilder_ == null) {
3540           ensureAttributeIsMutable();
3541           attribute_.remove(index);
3542           onChanged();
3543         } else {
3544           attributeBuilder_.remove(index);
3545         }
3546         return this;
3547       }
3548       /**
3549        * <code>repeated .NameBytesPair attribute = 3;</code>
3550        */
3551       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
3552           int index) {
3553         return getAttributeFieldBuilder().getBuilder(index);
3554       }
3555       /**
3556        * <code>repeated .NameBytesPair attribute = 3;</code>
3557        */
3558       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
3559           int index) {
3560         if (attributeBuilder_ == null) {
3561           return attribute_.get(index);  } else {
3562           return attributeBuilder_.getMessageOrBuilder(index);
3563         }
3564       }
3565       /**
3566        * <code>repeated .NameBytesPair attribute = 3;</code>
3567        */
3568       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
3569            getAttributeOrBuilderList() {
3570         if (attributeBuilder_ != null) {
3571           return attributeBuilder_.getMessageOrBuilderList();
3572         } else {
3573           return java.util.Collections.unmodifiableList(attribute_);
3574         }
3575       }
3576       /**
3577        * <code>repeated .NameBytesPair attribute = 3;</code>
3578        */
3579       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
3580         return getAttributeFieldBuilder().addBuilder(
3581             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
3582       }
3583       /**
3584        * <code>repeated .NameBytesPair attribute = 3;</code>
3585        */
3586       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
3587           int index) {
3588         return getAttributeFieldBuilder().addBuilder(
3589             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
3590       }
3591       /**
3592        * <code>repeated .NameBytesPair attribute = 3;</code>
3593        */
3594       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> 
3595            getAttributeBuilderList() {
3596         return getAttributeFieldBuilder().getBuilderList();
3597       }
      // Lazily switches this field from list mode to RepeatedFieldBuilder mode;
      // after this call attribute_ is null and all access goes through attributeBuilder_.
3598       private com.google.protobuf.RepeatedFieldBuilder<
3599           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
3600           getAttributeFieldBuilder() {
3601         if (attributeBuilder_ == null) {
3602           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
3603               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
3604                   attribute_,
3605                   ((bitField0_ & 0x00000004) == 0x00000004),
3606                   getParentForChildren(),
3607                   isClean());
3608           attribute_ = null;
3609         }
3610         return attributeBuilder_;
3611       }
3612 
      // NOTE(review): protoc-generated accessors for the optional singular message
      // field `.Filter filter = 4`. Presence is tracked by bitField0_ bit 0x00000008;
      // data lives in filter_ until getFilterFieldBuilder() hands ownership to
      // filterBuilder_ (SingleFieldBuilder). Generated code — do not hand-edit.
3613       // optional .Filter filter = 4;
3614       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
3615       private com.google.protobuf.SingleFieldBuilder<
3616           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
3617       /**
3618        * <code>optional .Filter filter = 4;</code>
3619        */
3620       public boolean hasFilter() {
3621         return ((bitField0_ & 0x00000008) == 0x00000008);
3622       }
3623       /**
3624        * <code>optional .Filter filter = 4;</code>
3625        */
3626       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
3627         if (filterBuilder_ == null) {
3628           return filter_;
3629         } else {
3630           return filterBuilder_.getMessage();
3631         }
3632       }
3633       /**
3634        * <code>optional .Filter filter = 4;</code>
3635        */
3636       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
3637         if (filterBuilder_ == null) {
3638           if (value == null) {
3639             throw new NullPointerException();
3640           }
3641           filter_ = value;
3642           onChanged();
3643         } else {
3644           filterBuilder_.setMessage(value);
3645         }
3646         bitField0_ |= 0x00000008;
3647         return this;
3648       }
3649       /**
3650        * <code>optional .Filter filter = 4;</code>
3651        */
3652       public Builder setFilter(
3653           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
3654         if (filterBuilder_ == null) {
3655           filter_ = builderForValue.build();
3656           onChanged();
3657         } else {
3658           filterBuilder_.setMessage(builderForValue.build());
3659         }
3660         bitField0_ |= 0x00000008;
3661         return this;
3662       }
3663       /**
3664        * <code>optional .Filter filter = 4;</code>
3665        */
3666       public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
3667         if (filterBuilder_ == null) {
          // If a non-default filter is already present, merge field-by-field;
          // otherwise just adopt the incoming message (protobuf merge semantics).
3668           if (((bitField0_ & 0x00000008) == 0x00000008) &&
3669               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
3670             filter_ =
3671               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
3672           } else {
3673             filter_ = value;
3674           }
3675           onChanged();
3676         } else {
3677           filterBuilder_.mergeFrom(value);
3678         }
3679         bitField0_ |= 0x00000008;
3680         return this;
3681       }
3682       /**
3683        * <code>optional .Filter filter = 4;</code>
3684        */
3685       public Builder clearFilter() {
3686         if (filterBuilder_ == null) {
3687           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
3688           onChanged();
3689         } else {
3690           filterBuilder_.clear();
3691         }
3692         bitField0_ = (bitField0_ & ~0x00000008);
3693         return this;
3694       }
3695       /**
3696        * <code>optional .Filter filter = 4;</code>
3697        */
3698       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
        // Marks the field present: obtaining a sub-builder implies intent to populate it.
3699         bitField0_ |= 0x00000008;
3700         onChanged();
3701         return getFilterFieldBuilder().getBuilder();
3702       }
3703       /**
3704        * <code>optional .Filter filter = 4;</code>
3705        */
3706       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
3707         if (filterBuilder_ != null) {
3708           return filterBuilder_.getMessageOrBuilder();
3709         } else {
3710           return filter_;
3711         }
3712       }
3713       /**
3714        * <code>optional .Filter filter = 4;</code>
3715        */
3716       private com.google.protobuf.SingleFieldBuilder<
3717           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
3718           getFilterFieldBuilder() {
3719         if (filterBuilder_ == null) {
3720           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3721               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
3722                   filter_,
3723                   getParentForChildren(),
3724                   isClean());
3725           filter_ = null;
3726         }
3727         return filterBuilder_;
3728       }
3729 
      // NOTE(review): protoc-generated accessors for the optional singular message
      // field `.TimeRange time_range = 5`. Same SingleFieldBuilder pattern as
      // `filter` above, keyed on bitField0_ bit 0x00000010. Generated code —
      // do not hand-edit.
3730       // optional .TimeRange time_range = 5;
3731       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
3732       private com.google.protobuf.SingleFieldBuilder<
3733           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
3734       /**
3735        * <code>optional .TimeRange time_range = 5;</code>
3736        */
3737       public boolean hasTimeRange() {
3738         return ((bitField0_ & 0x00000010) == 0x00000010);
3739       }
3740       /**
3741        * <code>optional .TimeRange time_range = 5;</code>
3742        */
3743       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
3744         if (timeRangeBuilder_ == null) {
3745           return timeRange_;
3746         } else {
3747           return timeRangeBuilder_.getMessage();
3748         }
3749       }
3750       /**
3751        * <code>optional .TimeRange time_range = 5;</code>
3752        */
3753       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
3754         if (timeRangeBuilder_ == null) {
3755           if (value == null) {
3756             throw new NullPointerException();
3757           }
3758           timeRange_ = value;
3759           onChanged();
3760         } else {
3761           timeRangeBuilder_.setMessage(value);
3762         }
3763         bitField0_ |= 0x00000010;
3764         return this;
3765       }
3766       /**
3767        * <code>optional .TimeRange time_range = 5;</code>
3768        */
3769       public Builder setTimeRange(
3770           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
3771         if (timeRangeBuilder_ == null) {
3772           timeRange_ = builderForValue.build();
3773           onChanged();
3774         } else {
3775           timeRangeBuilder_.setMessage(builderForValue.build());
3776         }
3777         bitField0_ |= 0x00000010;
3778         return this;
3779       }
3780       /**
3781        * <code>optional .TimeRange time_range = 5;</code>
3782        */
3783       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
3784         if (timeRangeBuilder_ == null) {
          // Merge into an existing non-default value, else adopt the incoming one.
3785           if (((bitField0_ & 0x00000010) == 0x00000010) &&
3786               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
3787             timeRange_ =
3788               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
3789           } else {
3790             timeRange_ = value;
3791           }
3792           onChanged();
3793         } else {
3794           timeRangeBuilder_.mergeFrom(value);
3795         }
3796         bitField0_ |= 0x00000010;
3797         return this;
3798       }
3799       /**
3800        * <code>optional .TimeRange time_range = 5;</code>
3801        */
3802       public Builder clearTimeRange() {
3803         if (timeRangeBuilder_ == null) {
3804           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
3805           onChanged();
3806         } else {
3807           timeRangeBuilder_.clear();
3808         }
3809         bitField0_ = (bitField0_ & ~0x00000010);
3810         return this;
3811       }
3812       /**
3813        * <code>optional .TimeRange time_range = 5;</code>
3814        */
3815       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
3816         bitField0_ |= 0x00000010;
3817         onChanged();
3818         return getTimeRangeFieldBuilder().getBuilder();
3819       }
3820       /**
3821        * <code>optional .TimeRange time_range = 5;</code>
3822        */
3823       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
3824         if (timeRangeBuilder_ != null) {
3825           return timeRangeBuilder_.getMessageOrBuilder();
3826         } else {
3827           return timeRange_;
3828         }
3829       }
3830       /**
3831        * <code>optional .TimeRange time_range = 5;</code>
3832        */
3833       private com.google.protobuf.SingleFieldBuilder<
3834           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> 
3835           getTimeRangeFieldBuilder() {
3836         if (timeRangeBuilder_ == null) {
3837           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3838               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
3839                   timeRange_,
3840                   getParentForChildren(),
3841                   isClean());
3842           timeRange_ = null;
3843         }
3844         return timeRangeBuilder_;
3845       }
3846 
      // NOTE(review): protoc-generated scalar accessors for
      // `optional uint32 max_versions = 6 [default = 1]`; presence bit 0x00000020.
      // clear resets to the proto-declared default (1). Generated code — do not hand-edit.
3847       // optional uint32 max_versions = 6 [default = 1];
3848       private int maxVersions_ = 1;
3849       /**
3850        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3851        */
3852       public boolean hasMaxVersions() {
3853         return ((bitField0_ & 0x00000020) == 0x00000020);
3854       }
3855       /**
3856        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3857        */
3858       public int getMaxVersions() {
3859         return maxVersions_;
3860       }
3861       /**
3862        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3863        */
3864       public Builder setMaxVersions(int value) {
3865         bitField0_ |= 0x00000020;
3866         maxVersions_ = value;
3867         onChanged();
3868         return this;
3869       }
3870       /**
3871        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3872        */
3873       public Builder clearMaxVersions() {
3874         bitField0_ = (bitField0_ & ~0x00000020);
3875         maxVersions_ = 1;
3876         onChanged();
3877         return this;
3878       }
3879 
      // NOTE(review): protoc-generated scalar accessors for
      // `optional bool cache_blocks = 7 [default = true]`; presence bit 0x00000040.
      // clear resets to the proto-declared default (true). Generated code — do not hand-edit.
3880       // optional bool cache_blocks = 7 [default = true];
3881       private boolean cacheBlocks_ = true;
3882       /**
3883        * <code>optional bool cache_blocks = 7 [default = true];</code>
3884        */
3885       public boolean hasCacheBlocks() {
3886         return ((bitField0_ & 0x00000040) == 0x00000040);
3887       }
3888       /**
3889        * <code>optional bool cache_blocks = 7 [default = true];</code>
3890        */
3891       public boolean getCacheBlocks() {
3892         return cacheBlocks_;
3893       }
3894       /**
3895        * <code>optional bool cache_blocks = 7 [default = true];</code>
3896        */
3897       public Builder setCacheBlocks(boolean value) {
3898         bitField0_ |= 0x00000040;
3899         cacheBlocks_ = value;
3900         onChanged();
3901         return this;
3902       }
3903       /**
3904        * <code>optional bool cache_blocks = 7 [default = true];</code>
3905        */
3906       public Builder clearCacheBlocks() {
3907         bitField0_ = (bitField0_ & ~0x00000040);
3908         cacheBlocks_ = true;
3909         onChanged();
3910         return this;
3911       }
3912 
      // NOTE(review): protoc-generated scalar accessors for
      // `optional uint32 store_limit = 8`; presence bit 0x00000080; default 0.
      // Generated code — do not hand-edit.
3913       // optional uint32 store_limit = 8;
3914       private int storeLimit_ ;
3915       /**
3916        * <code>optional uint32 store_limit = 8;</code>
3917        */
3918       public boolean hasStoreLimit() {
3919         return ((bitField0_ & 0x00000080) == 0x00000080);
3920       }
3921       /**
3922        * <code>optional uint32 store_limit = 8;</code>
3923        */
3924       public int getStoreLimit() {
3925         return storeLimit_;
3926       }
3927       /**
3928        * <code>optional uint32 store_limit = 8;</code>
3929        */
3930       public Builder setStoreLimit(int value) {
3931         bitField0_ |= 0x00000080;
3932         storeLimit_ = value;
3933         onChanged();
3934         return this;
3935       }
3936       /**
3937        * <code>optional uint32 store_limit = 8;</code>
3938        */
3939       public Builder clearStoreLimit() {
3940         bitField0_ = (bitField0_ & ~0x00000080);
3941         storeLimit_ = 0;
3942         onChanged();
3943         return this;
3944       }
3945 
      // NOTE(review): protoc-generated scalar accessors for
      // `optional uint32 store_offset = 9`; presence bit 0x00000100; default 0.
      // Generated code — do not hand-edit.
3946       // optional uint32 store_offset = 9;
3947       private int storeOffset_ ;
3948       /**
3949        * <code>optional uint32 store_offset = 9;</code>
3950        */
3951       public boolean hasStoreOffset() {
3952         return ((bitField0_ & 0x00000100) == 0x00000100);
3953       }
3954       /**
3955        * <code>optional uint32 store_offset = 9;</code>
3956        */
3957       public int getStoreOffset() {
3958         return storeOffset_;
3959       }
3960       /**
3961        * <code>optional uint32 store_offset = 9;</code>
3962        */
3963       public Builder setStoreOffset(int value) {
3964         bitField0_ |= 0x00000100;
3965         storeOffset_ = value;
3966         onChanged();
3967         return this;
3968       }
3969       /**
3970        * <code>optional uint32 store_offset = 9;</code>
3971        */
3972       public Builder clearStoreOffset() {
3973         bitField0_ = (bitField0_ & ~0x00000100);
3974         storeOffset_ = 0;
3975         onChanged();
3976         return this;
3977       }
3978 
3979       // optional bool existence_only = 10 [default = false];
3980       private boolean existenceOnly_ ;  // presence bit 9 of bitField0_
3981       /**
3982        * <code>optional bool existence_only = 10 [default = false];</code>
3983        *
3984        * <pre>
3985        * The result isn't asked for, just check for
3986        * the existence.
3987        * </pre>
3988        */
3989       public boolean hasExistenceOnly() {
3990         return ((bitField0_ & 0x00000200) == 0x00000200);  // presence bit 9
3991       }
3992       /**
3993        * <code>optional bool existence_only = 10 [default = false];</code>
3994        *
3995        * <pre>
3996        * The result isn't asked for, just check for
3997        * the existence.
3998        * </pre>
3999        */
4000       public boolean getExistenceOnly() {
4001         return existenceOnly_;
4002       }
4003       /**
4004        * <code>optional bool existence_only = 10 [default = false];</code>
4005        *
4006        * <pre>
4007        * The result isn't asked for, just check for
4008        * the existence.
4009        * </pre>
4010        */
4011       public Builder setExistenceOnly(boolean value) {
4012         bitField0_ |= 0x00000200;  // mark existence_only present
4013         existenceOnly_ = value;
4014         onChanged();
4015         return this;
4016       }
4017       /**
4018        * <code>optional bool existence_only = 10 [default = false];</code>
4019        *
4020        * <pre>
4021        * The result isn't asked for, just check for
4022        * the existence.
4023        * </pre>
4024        */
4025       public Builder clearExistenceOnly() {
4026         bitField0_ = (bitField0_ & ~0x00000200);
4027         existenceOnly_ = false;  // restore proto default ([default = false])
4028         onChanged();
4029         return this;
4030       }
4031 
4032       // optional bool closest_row_before = 11 [default = false];
4033       private boolean closestRowBefore_ ;  // presence bit 10 of bitField0_
4034       /**
4035        * <code>optional bool closest_row_before = 11 [default = false];</code>
4036        *
4037        * <pre>
4038        * If the row to get doesn't exist, return the
4039        * closest row before.
4040        * </pre>
4041        */
4042       public boolean hasClosestRowBefore() {
4043         return ((bitField0_ & 0x00000400) == 0x00000400);  // presence bit 10
4044       }
4045       /**
4046        * <code>optional bool closest_row_before = 11 [default = false];</code>
4047        *
4048        * <pre>
4049        * If the row to get doesn't exist, return the
4050        * closest row before.
4051        * </pre>
4052        */
4053       public boolean getClosestRowBefore() {
4054         return closestRowBefore_;
4055       }
4056       /**
4057        * <code>optional bool closest_row_before = 11 [default = false];</code>
4058        *
4059        * <pre>
4060        * If the row to get doesn't exist, return the
4061        * closest row before.
4062        * </pre>
4063        */
4064       public Builder setClosestRowBefore(boolean value) {
4065         bitField0_ |= 0x00000400;  // mark closest_row_before present
4066         closestRowBefore_ = value;
4067         onChanged();
4068         return this;
4069       }
4070       /**
4071        * <code>optional bool closest_row_before = 11 [default = false];</code>
4072        *
4073        * <pre>
4074        * If the row to get doesn't exist, return the
4075        * closest row before.
4076        * </pre>
4077        */
4078       public Builder clearClosestRowBefore() {
4079         bitField0_ = (bitField0_ & ~0x00000400);
4080         closestRowBefore_ = false;  // restore proto default ([default = false])
4081         onChanged();
4082         return this;
4083       }
4084 
4085       // optional .Consistency consistency = 12 [default = STRONG];
4086       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;  // enum field, never null; presence bit 11
4087       /**
4088        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4089        */
4090       public boolean hasConsistency() {
4091         return ((bitField0_ & 0x00000800) == 0x00000800);  // presence bit 11
4092       }
4093       /**
4094        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4095        */
4096       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
4097         return consistency_;
4098       }
4099       /**
4100        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4101        */
4102       public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
4103         if (value == null) {
4104           throw new NullPointerException();  // enum fields are null-hostile by generated-code contract
4105         }
4106         bitField0_ |= 0x00000800;
4107         consistency_ = value;
4108         onChanged();
4109         return this;
4110       }
4111       /**
4112        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4113        */
4114       public Builder clearConsistency() {
4115         bitField0_ = (bitField0_ & ~0x00000800);
4116         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;  // restore proto default ([default = STRONG])
4117         onChanged();
4118         return this;
4119       }
4120 
4121       // @@protoc_insertion_point(builder_scope:Get)
4122     }
4123 
4124     static {
4125       defaultInstance = new Get(true);  // NOTE(review): boolean ctor presumably the lightweight no-init variant — confirm against Get's ctor above
4126       defaultInstance.initFields();  // apply proto default values to the shared singleton
4127     }
4128 
4129     // @@protoc_insertion_point(class_scope:Get)
4130   }
4131 
4132   public interface ResultOrBuilder  // read-only accessor contract implemented by both Result and Result.Builder
4133       extends com.google.protobuf.MessageOrBuilder {
4134 
4135     // repeated .Cell cell = 1;
4136     /**
4137      * <code>repeated .Cell cell = 1;</code>
4138      *
4139      * <pre>
4140      * Result includes the Cells or else it just has a count of Cells
4141      * that are carried otherwise.
4142      * </pre>
4143      */
4144     java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> 
4145         getCellList();
4146     /**
4147      * <code>repeated .Cell cell = 1;</code>
4148      *
4149      * <pre>
4150      * Result includes the Cells or else it just has a count of Cells
4151      * that are carried otherwise.
4152      * </pre>
4153      */
4154     org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index);
4155     /**
4156      * <code>repeated .Cell cell = 1;</code>
4157      *
4158      * <pre>
4159      * Result includes the Cells or else it just has a count of Cells
4160      * that are carried otherwise.
4161      * </pre>
4162      */
4163     int getCellCount();
4164     /**
4165      * <code>repeated .Cell cell = 1;</code>
4166      *
4167      * <pre>
4168      * Result includes the Cells or else it just has a count of Cells
4169      * that are carried otherwise.
4170      * </pre>
4171      */
4172     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
4173         getCellOrBuilderList();
4174     /**
4175      * <code>repeated .Cell cell = 1;</code>
4176      *
4177      * <pre>
4178      * Result includes the Cells or else it just has a count of Cells
4179      * that are carried otherwise.
4180      * </pre>
4181      */
4182     org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
4183         int index);
4184 
4185     // optional int32 associated_cell_count = 2;
4186     /**
4187      * <code>optional int32 associated_cell_count = 2;</code>
4188      *
4189      * <pre>
4190      * The below count is set when the associated cells are
4191      * not part of this protobuf message; they are passed alongside
4192      * and then this Message is just a placeholder with metadata.
4193      * The count is needed to know how many to peel off the block of Cells as
4194      * ours.  NOTE: This is different from the pb managed cell_count of the
4195      * 'cell' field above which is non-null when the cells are pb'd.
4196      * </pre>
4197      */
4198     boolean hasAssociatedCellCount();
4199     /**
4200      * <code>optional int32 associated_cell_count = 2;</code>
4201      *
4202      * <pre>
4203      * The below count is set when the associated cells are
4204      * not part of this protobuf message; they are passed alongside
4205      * and then this Message is just a placeholder with metadata.
4206      * The count is needed to know how many to peel off the block of Cells as
4207      * ours.  NOTE: This is different from the pb managed cell_count of the
4208      * 'cell' field above which is non-null when the cells are pb'd.
4209      * </pre>
4210      */
4211     int getAssociatedCellCount();
4212 
4213     // optional bool exists = 3;
4214     /**
4215      * <code>optional bool exists = 3;</code>
4216      *
4217      * <pre>
4218      * used for Get to check existence only. Not set if existence_only was not set to true
4219      *  in the query.
4220      * </pre>
4221      */
4222     boolean hasExists();
4223     /**
4224      * <code>optional bool exists = 3;</code>
4225      *
4226      * <pre>
4227      * used for Get to check existence only. Not set if existence_only was not set to true
4228      *  in the query.
4229      * </pre>
4230      */
4231     boolean getExists();
4232 
4233     // optional bool stale = 4 [default = false];
4234     /**
4235      * <code>optional bool stale = 4 [default = false];</code>
4236      *
4237      * <pre>
4238      * Whether or not the results are coming from possibly stale data 
4239      * </pre>
4240      */
4241     boolean hasStale();
4242     /**
4243      * <code>optional bool stale = 4 [default = false];</code>
4244      *
4245      * <pre>
4246      * Whether or not the results are coming from possibly stale data 
4247      * </pre>
4248      */
4249     boolean getStale();
4250 
4251     // optional bool partial = 5 [default = false];
4252     /**
4253      * <code>optional bool partial = 5 [default = false];</code>
4254      *
4255      * <pre>
4256      * Whether or not the entire result could be returned. Results will be split when
4257      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4258      * cells for a row and must be combined with a result containing the remaining cells
4259      * to form a complete result
4260      * </pre>
4261      */
4262     boolean hasPartial();
4263     /**
4264      * <code>optional bool partial = 5 [default = false];</code>
4265      *
4266      * <pre>
4267      * Whether or not the entire result could be returned. Results will be split when
4268      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4269      * cells for a row and must be combined with a result containing the remaining cells
4270      * to form a complete result
4271      * </pre>
4272      */
4273     boolean getPartial();
4274   }
4275   /**
4276    * Protobuf type {@code Result}
4277    */
4278   public static final class Result extends
4279       com.google.protobuf.GeneratedMessage
4280       implements ResultOrBuilder {
4281     // Use Result.newBuilder() to construct.
4282     private Result(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4283       super(builder);
4284       this.unknownFields = builder.getUnknownFields();  // carry over fields the parser did not recognize
4285     }
4286     private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }  // lightweight ctor for the shared default instance; skips field init
4287 
4288     private static final Result defaultInstance;
4289     public static Result getDefaultInstance() {
4290       return defaultInstance;
4291     }
4292 
4293     public Result getDefaultInstanceForType() {
4294       return defaultInstance;
4295     }
4296 
4297     private final com.google.protobuf.UnknownFieldSet unknownFields;
4298     @java.lang.Override
4299     public final com.google.protobuf.UnknownFieldSet
4300         getUnknownFields() {
4301       return this.unknownFields;
4302     }
4303     private Result(
4304         com.google.protobuf.CodedInputStream input,
4305         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4306         throws com.google.protobuf.InvalidProtocolBufferException {
4307       initFields();  // start from proto defaults before reading the wire
4308       int mutable_bitField0_ = 0;  // tracks lazily-allocated repeated fields during parsing only
4309       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4310           com.google.protobuf.UnknownFieldSet.newBuilder();
4311       try {
4312         boolean done = false;
4313         while (!done) {
4314           int tag = input.readTag();
4315           switch (tag) {  // tag = (field_number << 3) | wire_type; lexical case order is irrelevant to dispatch
4316             case 0:  // zero tag: end of the input stream
4317               done = true;
4318               break;
4319             default: {  // unrecognized tag: preserve it, or stop on an end-group marker
4320               if (!parseUnknownField(input, unknownFields,
4321                                      extensionRegistry, tag)) {
4322                 done = true;
4323               }
4324               break;
4325             }
4326             case 10: {  // field 1 (cell), length-delimited message
4327               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4328                 cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>();  // allocate the list only on first occurrence
4329                 mutable_bitField0_ |= 0x00000001;
4330               }
4331               cell_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry));
4332               break;
4333             }
4334             case 16: {  // field 2 (associated_cell_count), varint
4335               bitField0_ |= 0x00000001;
4336               associatedCellCount_ = input.readInt32();
4337               break;
4338             }
4339             case 24: {  // field 3 (exists), varint bool
4340               bitField0_ |= 0x00000002;
4341               exists_ = input.readBool();
4342               break;
4343             }
4344             case 32: {  // field 4 (stale), varint bool
4345               bitField0_ |= 0x00000004;
4346               stale_ = input.readBool();
4347               break;
4348             }
4349             case 40: {  // field 5 (partial), varint bool
4350               bitField0_ |= 0x00000008;
4351               partial_ = input.readBool();
4352               break;
4353             }
4354           }
4355         }
4356       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4357         throw e.setUnfinishedMessage(this);  // attach the partially-built message for callers that want it
4358       } catch (java.io.IOException e) {
4359         throw new com.google.protobuf.InvalidProtocolBufferException(
4360             e.getMessage()).setUnfinishedMessage(this);
4361       } finally {
4362         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4363           cell_ = java.util.Collections.unmodifiableList(cell_);  // seal the repeated field even on error paths
4364         }
4365         this.unknownFields = unknownFields.build();
4366         makeExtensionsImmutable();
4367       }
4368     }
4369     public static final com.google.protobuf.Descriptors.Descriptor
4370         getDescriptor() {
4371       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
4372     }
4373 
4374     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4375         internalGetFieldAccessorTable() {
4376       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable
4377           .ensureFieldAccessorsInitialized(
4378               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
4379     }
4380 
4381     public static com.google.protobuf.Parser<Result> PARSER =
4382         new com.google.protobuf.AbstractParser<Result>() {  // stateless parser; delegates to the stream-parsing ctor above
4383       public Result parsePartialFrom(
4384           com.google.protobuf.CodedInputStream input,
4385           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4386           throws com.google.protobuf.InvalidProtocolBufferException {
4387         return new Result(input, extensionRegistry);
4388       }
4389     };
4390 
4391     @java.lang.Override
4392     public com.google.protobuf.Parser<Result> getParserForType() {
4393       return PARSER;
4394     }
4395 
4396     private int bitField0_;  // presence bits for optional fields 2-5 (repeated cell needs no bit)
4397     // repeated .Cell cell = 1;
4398     public static final int CELL_FIELD_NUMBER = 1;
4399     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_;  // unmodifiable after parse (see parsing ctor's finally block)
4400     /**
4401      * <code>repeated .Cell cell = 1;</code>
4402      *
4403      * <pre>
4404      * Result includes the Cells or else it just has a count of Cells
4405      * that are carried otherwise.
4406      * </pre>
4407      */
4408     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
4409       return cell_;
4410     }
4411     /**
4412      * <code>repeated .Cell cell = 1;</code>
4413      *
4414      * <pre>
4415      * Result includes the Cells or else it just has a count of Cells
4416      * that are carried otherwise.
4417      * </pre>
4418      */
4419     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
4420         getCellOrBuilderList() {
4421       return cell_;
4422     }
4423     /**
4424      * <code>repeated .Cell cell = 1;</code>
4425      *
4426      * <pre>
4427      * Result includes the Cells or else it just has a count of Cells
4428      * that are carried otherwise.
4429      * </pre>
4430      */
4431     public int getCellCount() {
4432       return cell_.size();
4433     }
4434     /**
4435      * <code>repeated .Cell cell = 1;</code>
4436      *
4437      * <pre>
4438      * Result includes the Cells or else it just has a count of Cells
4439      * that are carried otherwise.
4440      * </pre>
4441      */
4442     public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
4443       return cell_.get(index);
4444     }
4445     /**
4446      * <code>repeated .Cell cell = 1;</code>
4447      *
4448      * <pre>
4449      * Result includes the Cells or else it just has a count of Cells
4450      * that are carried otherwise.
4451      * </pre>
4452      */
4453     public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
4454         int index) {
4455       return cell_.get(index);
4456     }
4457 
4458     // optional int32 associated_cell_count = 2;
4459     public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2;
4460     private int associatedCellCount_;  // presence bit 0 of bitField0_
4461     /**
4462      * <code>optional int32 associated_cell_count = 2;</code>
4463      *
4464      * <pre>
4465      * The below count is set when the associated cells are
4466      * not part of this protobuf message; they are passed alongside
4467      * and then this Message is just a placeholder with metadata.
4468      * The count is needed to know how many to peel off the block of Cells as
4469      * ours.  NOTE: This is different from the pb managed cell_count of the
4470      * 'cell' field above which is non-null when the cells are pb'd.
4471      * </pre>
4472      */
4473     public boolean hasAssociatedCellCount() {
4474       return ((bitField0_ & 0x00000001) == 0x00000001);
4475     }
4476     /**
4477      * <code>optional int32 associated_cell_count = 2;</code>
4478      *
4479      * <pre>
4480      * The below count is set when the associated cells are
4481      * not part of this protobuf message; they are passed alongside
4482      * and then this Message is just a placeholder with metadata.
4483      * The count is needed to know how many to peel off the block of Cells as
4484      * ours.  NOTE: This is different from the pb managed cell_count of the
4485      * 'cell' field above which is non-null when the cells are pb'd.
4486      * </pre>
4487      */
4488     public int getAssociatedCellCount() {
4489       return associatedCellCount_;
4490     }
4491 
4492     // optional bool exists = 3;
4493     public static final int EXISTS_FIELD_NUMBER = 3;
4494     private boolean exists_;  // presence bit 1 of bitField0_
4495     /**
4496      * <code>optional bool exists = 3;</code>
4497      *
4498      * <pre>
4499      * used for Get to check existence only. Not set if existence_only was not set to true
4500      *  in the query.
4501      * </pre>
4502      */
4503     public boolean hasExists() {
4504       return ((bitField0_ & 0x00000002) == 0x00000002);
4505     }
4506     /**
4507      * <code>optional bool exists = 3;</code>
4508      *
4509      * <pre>
4510      * used for Get to check existence only. Not set if existence_only was not set to true
4511      *  in the query.
4512      * </pre>
4513      */
4514     public boolean getExists() {
4515       return exists_;
4516     }
4517 
4518     // optional bool stale = 4 [default = false];
4519     public static final int STALE_FIELD_NUMBER = 4;
4520     private boolean stale_;  // presence bit 2 of bitField0_
4521     /**
4522      * <code>optional bool stale = 4 [default = false];</code>
4523      *
4524      * <pre>
4525      * Whether or not the results are coming from possibly stale data 
4526      * </pre>
4527      */
4528     public boolean hasStale() {
4529       return ((bitField0_ & 0x00000004) == 0x00000004);
4530     }
4531     /**
4532      * <code>optional bool stale = 4 [default = false];</code>
4533      *
4534      * <pre>
4535      * Whether or not the results are coming from possibly stale data 
4536      * </pre>
4537      */
4538     public boolean getStale() {
4539       return stale_;
4540     }
4541 
4542     // optional bool partial = 5 [default = false];
4543     public static final int PARTIAL_FIELD_NUMBER = 5;
4544     private boolean partial_;  // presence bit 3 of bitField0_
4545     /**
4546      * <code>optional bool partial = 5 [default = false];</code>
4547      *
4548      * <pre>
4549      * Whether or not the entire result could be returned. Results will be split when
4550      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4551      * cells for a row and must be combined with a result containing the remaining cells
4552      * to form a complete result
4553      * </pre>
4554      */
4555     public boolean hasPartial() {
4556       return ((bitField0_ & 0x00000008) == 0x00000008);
4557     }
4558     /**
4559      * <code>optional bool partial = 5 [default = false];</code>
4560      *
4561      * <pre>
4562      * Whether or not the entire result could be returned. Results will be split when
4563      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4564      * cells for a row and must be combined with a result containing the remaining cells
4565      * to form a complete result
4566      * </pre>
4567      */
4568     public boolean getPartial() {
4569       return partial_;
4570     }
4571 
4572     private void initFields() {  // reset every field to its proto-declared default
4573       cell_ = java.util.Collections.emptyList();
4574       associatedCellCount_ = 0;
4575       exists_ = false;
4576       stale_ = false;
4577       partial_ = false;
4578     }
4579     private byte memoizedIsInitialized = -1;  // -1 = not yet computed, 1 = initialized
4580     public final boolean isInitialized() {
4581       byte isInitialized = memoizedIsInitialized;
4582       if (isInitialized != -1) return isInitialized == 1;
4583 
4584       memoizedIsInitialized = 1;  // Result has no required fields, so it is always initialized
4585       return true;
4586     }
4587 
4588     public void writeTo(com.google.protobuf.CodedOutputStream output)
4589                         throws java.io.IOException {
4590       getSerializedSize();  // ensure memoizedSerializedSize is populated before writing
4591       for (int i = 0; i < cell_.size(); i++) {
4592         output.writeMessage(1, cell_.get(i));  // field 1: each cell as a length-delimited message
4593       }
4594       if (((bitField0_ & 0x00000001) == 0x00000001)) {  // only set fields are emitted on the wire
4595         output.writeInt32(2, associatedCellCount_);
4596       }
4597       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4598         output.writeBool(3, exists_);
4599       }
4600       if (((bitField0_ & 0x00000004) == 0x00000004)) {
4601         output.writeBool(4, stale_);
4602       }
4603       if (((bitField0_ & 0x00000008) == 0x00000008)) {
4604         output.writeBool(5, partial_);
4605       }
4606       getUnknownFields().writeTo(output);  // round-trip fields this version does not know about
4607     }
4608 
4609     private int memoizedSerializedSize = -1;  // -1 = not yet computed; safe to cache since the message is immutable
4610     public int getSerializedSize() {
4611       int size = memoizedSerializedSize;
4612       if (size != -1) return size;
4613 
4614       size = 0;  // mirrors writeTo(): sum the encoded size of every field that would be written
4615       for (int i = 0; i < cell_.size(); i++) {
4616         size += com.google.protobuf.CodedOutputStream
4617           .computeMessageSize(1, cell_.get(i));
4618       }
4619       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4620         size += com.google.protobuf.CodedOutputStream
4621           .computeInt32Size(2, associatedCellCount_);
4622       }
4623       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4624         size += com.google.protobuf.CodedOutputStream
4625           .computeBoolSize(3, exists_);
4626       }
4627       if (((bitField0_ & 0x00000004) == 0x00000004)) {
4628         size += com.google.protobuf.CodedOutputStream
4629           .computeBoolSize(4, stale_);
4630       }
4631       if (((bitField0_ & 0x00000008) == 0x00000008)) {
4632         size += com.google.protobuf.CodedOutputStream
4633           .computeBoolSize(5, partial_);
4634       }
4635       size += getUnknownFields().getSerializedSize();
4636       memoizedSerializedSize = size;
4637       return size;
4638     }
4639 
4640     private static final long serialVersionUID = 0L;
4641     @java.lang.Override
4642     protected java.lang.Object writeReplace()
4643         throws java.io.ObjectStreamException {
4644       return super.writeReplace();  // delegate Java serialization to GeneratedMessage's replacement object
4645     }
4646 
4647     @java.lang.Override
4648     public boolean equals(final java.lang.Object obj) {
4649       if (obj == this) {
4650        return true;
4651       }
4652       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) {
4653         return super.equals(obj);
4654       }
4655       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj;
4656 
4657       boolean result = true;  // field-by-field comparison; optional fields must agree on presence before value
4658       result = result && getCellList()
4659           .equals(other.getCellList());
4660       result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
4661       if (hasAssociatedCellCount()) {
4662         result = result && (getAssociatedCellCount()
4663             == other.getAssociatedCellCount());
4664       }
4665       result = result && (hasExists() == other.hasExists());
4666       if (hasExists()) {
4667         result = result && (getExists()
4668             == other.getExists());
4669       }
4670       result = result && (hasStale() == other.hasStale());
4671       if (hasStale()) {
4672         result = result && (getStale()
4673             == other.getStale());
4674       }
4675       result = result && (hasPartial() == other.hasPartial());
4676       if (hasPartial()) {
4677         result = result && (getPartial()
4678             == other.getPartial());
4679       }
4680       result = result &&
4681           getUnknownFields().equals(other.getUnknownFields());  // unknown fields also participate in equality
4682       return result;
4683     }
4684 
4685     private int memoizedHashCode = 0;  // 0 = not yet computed; safe to cache since the message is immutable
4686     @java.lang.Override
4687     public int hashCode() {
4688       if (memoizedHashCode != 0) {
4689         return memoizedHashCode;
4690       }
4691       int hash = 41;  // standard generated-code scheme: fold in field number then field value for each set field
4692       hash = (19 * hash) + getDescriptorForType().hashCode();
4693       if (getCellCount() > 0) {
4694         hash = (37 * hash) + CELL_FIELD_NUMBER;
4695         hash = (53 * hash) + getCellList().hashCode();
4696       }
4697       if (hasAssociatedCellCount()) {
4698         hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
4699         hash = (53 * hash) + getAssociatedCellCount();
4700       }
4701       if (hasExists()) {
4702         hash = (37 * hash) + EXISTS_FIELD_NUMBER;
4703         hash = (53 * hash) + hashBoolean(getExists());
4704       }
4705       if (hasStale()) {
4706         hash = (37 * hash) + STALE_FIELD_NUMBER;
4707         hash = (53 * hash) + hashBoolean(getStale());
4708       }
4709       if (hasPartial()) {
4710         hash = (37 * hash) + PARTIAL_FIELD_NUMBER;
4711         hash = (53 * hash) + hashBoolean(getPartial());
4712       }
4713       hash = (29 * hash) + getUnknownFields().hashCode();
4714       memoizedHashCode = hash;
4715       return hash;
4716     }
4717 
4718     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4719         com.google.protobuf.ByteString data)
4720         throws com.google.protobuf.InvalidProtocolBufferException {
4721       return PARSER.parseFrom(data);  // every parseFrom overload below is a thin delegate to PARSER
4722     }
4723     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4724         com.google.protobuf.ByteString data,
4725         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4726         throws com.google.protobuf.InvalidProtocolBufferException {
4727       return PARSER.parseFrom(data, extensionRegistry);
4728     }
4729     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data)
4730         throws com.google.protobuf.InvalidProtocolBufferException {
4731       return PARSER.parseFrom(data);
4732     }
4733     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4734         byte[] data,
4735         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4736         throws com.google.protobuf.InvalidProtocolBufferException {
4737       return PARSER.parseFrom(data, extensionRegistry);
4738     }
4739     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input)
4740         throws java.io.IOException {
4741       return PARSER.parseFrom(input);
4742     }
4743     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4744         java.io.InputStream input,
4745         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4746         throws java.io.IOException {
4747       return PARSER.parseFrom(input, extensionRegistry);
4748     }
4749     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input)
4750         throws java.io.IOException {
4751       return PARSER.parseDelimitedFrom(input);  // delimited form: message preceded by its varint length
4752     }
4753     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(
4754         java.io.InputStream input,
4755         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4756         throws java.io.IOException {
4757       return PARSER.parseDelimitedFrom(input, extensionRegistry);
4758     }
4759     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4760         com.google.protobuf.CodedInputStream input)
4761         throws java.io.IOException {
4762       return PARSER.parseFrom(input);
4763     }
4764     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
4765         com.google.protobuf.CodedInputStream input,
4766         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4767         throws java.io.IOException {
4768       return PARSER.parseFrom(input, extensionRegistry);
4769     }
4770 
4771     public static Builder newBuilder() { return Builder.create(); }
4772     public Builder newBuilderForType() { return newBuilder(); }
4773     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) {
4774       return newBuilder().mergeFrom(prototype);  // builder pre-populated with the prototype's fields
4775     }
4776     public Builder toBuilder() { return newBuilder(this); }
4777 
4778     @java.lang.Override
4779     protected Builder newBuilderForType(
4780         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4781       Builder builder = new Builder(parent);  // parent-linked builder used for nested-message building
4782       return builder;
4783     }
4784     /**
4785      * Protobuf type {@code Result}
     *
     * <p>NOTE(review): protoc-generated builder ("DO NOT EDIT" file header);
     * behavioral changes belong in Client.proto, not here. Field presence is
     * tracked in {@code bitField0_}, one bit per field (0x1 = cell list owned,
     * 0x2 = associated_cell_count, 0x4 = exists, 0x8 = stale, 0x10 = partial).
     * The repeated {@code cell} field lives either in the plain list
     * {@code cell_} or, once nested builders are requested, in
     * {@code cellBuilder_}; exactly one of the two representations is active
     * at any time (every accessor branches on {@code cellBuilder_ == null}).
4786      */
4787     public static final class Builder extends
4788         com.google.protobuf.GeneratedMessage.Builder<Builder>
4789        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder {
4790       public static final com.google.protobuf.Descriptors.Descriptor
4791           getDescriptor() {
4792         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
4793       }
4794 
4795       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4796           internalGetFieldAccessorTable() {
4797         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable
4798             .ensureFieldAccessorsInitialized(
4799                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
4800       }
4801 
4802       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder()
4803       private Builder() {
4804         maybeForceBuilderInitialization();
4805       }
4806 
4807       private Builder(
4808           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4809         super(parent);
4810         maybeForceBuilderInitialization();
4811       }
      // Eagerly creates the nested field builder when the runtime flag
      // alwaysUseFieldBuilders is set; otherwise the builder is created lazily
      // on first use via getCellFieldBuilder().
4812       private void maybeForceBuilderInitialization() {
4813         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4814           getCellFieldBuilder();
4815         }
4816       }
4817       private static Builder create() {
4818         return new Builder();
4819       }
4820 
      // Resets every field to its default and clears all presence bits.
4821       public Builder clear() {
4822         super.clear();
4823         if (cellBuilder_ == null) {
4824           cell_ = java.util.Collections.emptyList();
4825           bitField0_ = (bitField0_ & ~0x00000001);
4826         } else {
4827           cellBuilder_.clear();
4828         }
4829         associatedCellCount_ = 0;
4830         bitField0_ = (bitField0_ & ~0x00000002);
4831         exists_ = false;
4832         bitField0_ = (bitField0_ & ~0x00000004);
4833         stale_ = false;
4834         bitField0_ = (bitField0_ & ~0x00000008);
4835         partial_ = false;
4836         bitField0_ = (bitField0_ & ~0x00000010);
4837         return this;
4838       }
4839 
4840       public Builder clone() {
4841         return create().mergeFrom(buildPartial());
4842       }
4843 
4844       public com.google.protobuf.Descriptors.Descriptor
4845           getDescriptorForType() {
4846         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
4847       }
4848 
4849       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() {
4850         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
4851       }
4852 
4853       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() {
4854         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial();
4855         if (!result.isInitialized()) {
4856           throw newUninitializedMessageException(result);
4857         }
4858         return result;
4859       }
4860 
      // Copies builder state into a new Result without checking required
      // fields. Builder presence bits are remapped to message bit positions:
      // bit 0x1 (list ownership) is dropped, so each scalar bit shifts right
      // by one (builder 0x2 -> message 0x1, 0x4 -> 0x2, etc.). If the plain
      // list is in use it is frozen (unmodifiableList) and ownership released
      // so later builder mutations copy-on-write instead of aliasing.
4861       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() {
4862         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this);
4863         int from_bitField0_ = bitField0_;
4864         int to_bitField0_ = 0;
4865         if (cellBuilder_ == null) {
4866           if (((bitField0_ & 0x00000001) == 0x00000001)) {
4867             cell_ = java.util.Collections.unmodifiableList(cell_);
4868             bitField0_ = (bitField0_ & ~0x00000001);
4869           }
4870           result.cell_ = cell_;
4871         } else {
4872           result.cell_ = cellBuilder_.build();
4873         }
4874         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4875           to_bitField0_ |= 0x00000001;
4876         }
4877         result.associatedCellCount_ = associatedCellCount_;
4878         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4879           to_bitField0_ |= 0x00000002;
4880         }
4881         result.exists_ = exists_;
4882         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
4883           to_bitField0_ |= 0x00000004;
4884         }
4885         result.stale_ = stale_;
4886         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
4887           to_bitField0_ |= 0x00000008;
4888         }
4889         result.partial_ = partial_;
4890         result.bitField0_ = to_bitField0_;
4891         onBuilt();
4892         return result;
4893       }
4894 
4895       public Builder mergeFrom(com.google.protobuf.Message other) {
4896         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) {
4897           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other);
4898         } else {
4899           super.mergeFrom(other);
4900           return this;
4901         }
4902       }
4903 
      // Field-wise merge: repeated cells are appended; set scalars in `other`
      // overwrite ours. When our list is empty we adopt `other`'s list by
      // reference and clear the ownership bit, so the next mutation triggers
      // a defensive copy in ensureCellIsMutable() rather than mutating
      // `other`'s (immutable) list.
4904       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) {
4905         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this;
4906         if (cellBuilder_ == null) {
4907           if (!other.cell_.isEmpty()) {
4908             if (cell_.isEmpty()) {
4909               cell_ = other.cell_;
4910               bitField0_ = (bitField0_ & ~0x00000001);
4911             } else {
4912               ensureCellIsMutable();
4913               cell_.addAll(other.cell_);
4914             }
4915             onChanged();
4916           }
4917         } else {
4918           if (!other.cell_.isEmpty()) {
4919             if (cellBuilder_.isEmpty()) {
4920               cellBuilder_.dispose();
4921               cellBuilder_ = null;
4922               cell_ = other.cell_;
4923               bitField0_ = (bitField0_ & ~0x00000001);
4924               cellBuilder_ = 
4925                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
4926                    getCellFieldBuilder() : null;
4927             } else {
4928               cellBuilder_.addAllMessages(other.cell_);
4929             }
4930           }
4931         }
4932         if (other.hasAssociatedCellCount()) {
4933           setAssociatedCellCount(other.getAssociatedCellCount());
4934         }
4935         if (other.hasExists()) {
4936           setExists(other.getExists());
4937         }
4938         if (other.hasStale()) {
4939           setStale(other.getStale());
4940         }
4941         if (other.hasPartial()) {
4942           setPartial(other.getPartial());
4943         }
4944         this.mergeUnknownFields(other.getUnknownFields());
4945         return this;
4946       }
4947 
      // All five fields are optional, so a Result is always initialized.
4948       public final boolean isInitialized() {
4949         return true;
4950       }
4951 
      // Parses from the wire and merges into this builder. On a parse error
      // the partially-parsed message (if any) is still merged in the finally
      // block before the exception propagates, so successfully-read fields
      // are not lost.
4952       public Builder mergeFrom(
4953           com.google.protobuf.CodedInputStream input,
4954           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4955           throws java.io.IOException {
4956         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parsedMessage = null;
4957         try {
4958           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4959         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4960           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage();
4961           throw e;
4962         } finally {
4963           if (parsedMessage != null) {
4964             mergeFrom(parsedMessage);
4965           }
4966         }
4967         return this;
4968       }
4969       private int bitField0_;
4970 
4971       // repeated .Cell cell = 1;
4972       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_ =
4973         java.util.Collections.emptyList();
      // Bit 0x1 means this builder privately owns cell_; when unset, cell_
      // may alias another message's list, so copy before mutating.
4974       private void ensureCellIsMutable() {
4975         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
4976           cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>(cell_);
4977           bitField0_ |= 0x00000001;
4978          }
4979       }
4980 
4981       private com.google.protobuf.RepeatedFieldBuilder<
4982           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_;
4983 
4984       /**
4985        * <code>repeated .Cell cell = 1;</code>
4986        *
4987        * <pre>
4988        * Result includes the Cells or else it just has a count of Cells
4989        * that are carried otherwise.
4990        * </pre>
4991        */
4992       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
4993         if (cellBuilder_ == null) {
4994           return java.util.Collections.unmodifiableList(cell_);
4995         } else {
4996           return cellBuilder_.getMessageList();
4997         }
4998       }
4999       /**
5000        * <code>repeated .Cell cell = 1;</code>
5001        *
5002        * <pre>
5003        * Result includes the Cells or else it just has a count of Cells
5004        * that are carried otherwise.
5005        * </pre>
5006        */
5007       public int getCellCount() {
5008         if (cellBuilder_ == null) {
5009           return cell_.size();
5010         } else {
5011           return cellBuilder_.getCount();
5012         }
5013       }
5014       /**
5015        * <code>repeated .Cell cell = 1;</code>
5016        *
5017        * <pre>
5018        * Result includes the Cells or else it just has a count of Cells
5019        * that are carried otherwise.
5020        * </pre>
5021        */
5022       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
5023         if (cellBuilder_ == null) {
5024           return cell_.get(index);
5025         } else {
5026           return cellBuilder_.getMessage(index);
5027         }
5028       }
5029       /**
5030        * <code>repeated .Cell cell = 1;</code>
5031        *
5032        * <pre>
5033        * Result includes the Cells or else it just has a count of Cells
5034        * that are carried otherwise.
5035        * </pre>
5036        */
5037       public Builder setCell(
5038           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
5039         if (cellBuilder_ == null) {
5040           if (value == null) {
5041             throw new NullPointerException();
5042           }
5043           ensureCellIsMutable();
5044           cell_.set(index, value);
5045           onChanged();
5046         } else {
5047           cellBuilder_.setMessage(index, value);
5048         }
5049         return this;
5050       }
5051       /**
5052        * <code>repeated .Cell cell = 1;</code>
5053        *
5054        * <pre>
5055        * Result includes the Cells or else it just has a count of Cells
5056        * that are carried otherwise.
5057        * </pre>
5058        */
5059       public Builder setCell(
5060           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5061         if (cellBuilder_ == null) {
5062           ensureCellIsMutable();
5063           cell_.set(index, builderForValue.build());
5064           onChanged();
5065         } else {
5066           cellBuilder_.setMessage(index, builderForValue.build());
5067         }
5068         return this;
5069       }
5070       /**
5071        * <code>repeated .Cell cell = 1;</code>
5072        *
5073        * <pre>
5074        * Result includes the Cells or else it just has a count of Cells
5075        * that are carried otherwise.
5076        * </pre>
5077        */
5078       public Builder addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
5079         if (cellBuilder_ == null) {
5080           if (value == null) {
5081             throw new NullPointerException();
5082           }
5083           ensureCellIsMutable();
5084           cell_.add(value);
5085           onChanged();
5086         } else {
5087           cellBuilder_.addMessage(value);
5088         }
5089         return this;
5090       }
5091       /**
5092        * <code>repeated .Cell cell = 1;</code>
5093        *
5094        * <pre>
5095        * Result includes the Cells or else it just has a count of Cells
5096        * that are carried otherwise.
5097        * </pre>
5098        */
5099       public Builder addCell(
5100           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
5101         if (cellBuilder_ == null) {
5102           if (value == null) {
5103             throw new NullPointerException();
5104           }
5105           ensureCellIsMutable();
5106           cell_.add(index, value);
5107           onChanged();
5108         } else {
5109           cellBuilder_.addMessage(index, value);
5110         }
5111         return this;
5112       }
5113       /**
5114        * <code>repeated .Cell cell = 1;</code>
5115        *
5116        * <pre>
5117        * Result includes the Cells or else it just has a count of Cells
5118        * that are carried otherwise.
5119        * </pre>
5120        */
5121       public Builder addCell(
5122           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5123         if (cellBuilder_ == null) {
5124           ensureCellIsMutable();
5125           cell_.add(builderForValue.build());
5126           onChanged();
5127         } else {
5128           cellBuilder_.addMessage(builderForValue.build());
5129         }
5130         return this;
5131       }
5132       /**
5133        * <code>repeated .Cell cell = 1;</code>
5134        *
5135        * <pre>
5136        * Result includes the Cells or else it just has a count of Cells
5137        * that are carried otherwise.
5138        * </pre>
5139        */
5140       public Builder addCell(
5141           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5142         if (cellBuilder_ == null) {
5143           ensureCellIsMutable();
5144           cell_.add(index, builderForValue.build());
5145           onChanged();
5146         } else {
5147           cellBuilder_.addMessage(index, builderForValue.build());
5148         }
5149         return this;
5150       }
5151       /**
5152        * <code>repeated .Cell cell = 1;</code>
5153        *
5154        * <pre>
5155        * Result includes the Cells or else it just has a count of Cells
5156        * that are carried otherwise.
5157        * </pre>
5158        */
5159       public Builder addAllCell(
5160           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> values) {
5161         if (cellBuilder_ == null) {
5162           ensureCellIsMutable();
5163           super.addAll(values, cell_);
5164           onChanged();
5165         } else {
5166           cellBuilder_.addAllMessages(values);
5167         }
5168         return this;
5169       }
5170       /**
5171        * <code>repeated .Cell cell = 1;</code>
5172        *
5173        * <pre>
5174        * Result includes the Cells or else it just has a count of Cells
5175        * that are carried otherwise.
5176        * </pre>
5177        */
5178       public Builder clearCell() {
5179         if (cellBuilder_ == null) {
5180           cell_ = java.util.Collections.emptyList();
5181           bitField0_ = (bitField0_ & ~0x00000001);
5182           onChanged();
5183         } else {
5184           cellBuilder_.clear();
5185         }
5186         return this;
5187       }
5188       /**
5189        * <code>repeated .Cell cell = 1;</code>
5190        *
5191        * <pre>
5192        * Result includes the Cells or else it just has a count of Cells
5193        * that are carried otherwise.
5194        * </pre>
5195        */
5196       public Builder removeCell(int index) {
5197         if (cellBuilder_ == null) {
5198           ensureCellIsMutable();
5199           cell_.remove(index);
5200           onChanged();
5201         } else {
5202           cellBuilder_.remove(index);
5203         }
5204         return this;
5205       }
5206       /**
5207        * <code>repeated .Cell cell = 1;</code>
5208        *
5209        * <pre>
5210        * Result includes the Cells or else it just has a count of Cells
5211        * that are carried otherwise.
5212        * </pre>
5213        */
5214       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder getCellBuilder(
5215           int index) {
5216         return getCellFieldBuilder().getBuilder(index);
5217       }
5218       /**
5219        * <code>repeated .Cell cell = 1;</code>
5220        *
5221        * <pre>
5222        * Result includes the Cells or else it just has a count of Cells
5223        * that are carried otherwise.
5224        * </pre>
5225        */
5226       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
5227           int index) {
5228         if (cellBuilder_ == null) {
5229           return cell_.get(index);  } else {
5230           return cellBuilder_.getMessageOrBuilder(index);
5231         }
5232       }
5233       /**
5234        * <code>repeated .Cell cell = 1;</code>
5235        *
5236        * <pre>
5237        * Result includes the Cells or else it just has a count of Cells
5238        * that are carried otherwise.
5239        * </pre>
5240        */
5241       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
5242            getCellOrBuilderList() {
5243         if (cellBuilder_ != null) {
5244           return cellBuilder_.getMessageOrBuilderList();
5245         } else {
5246           return java.util.Collections.unmodifiableList(cell_);
5247         }
5248       }
5249       /**
5250        * <code>repeated .Cell cell = 1;</code>
5251        *
5252        * <pre>
5253        * Result includes the Cells or else it just has a count of Cells
5254        * that are carried otherwise.
5255        * </pre>
5256        */
5257       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() {
5258         return getCellFieldBuilder().addBuilder(
5259             org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
5260       }
5261       /**
5262        * <code>repeated .Cell cell = 1;</code>
5263        *
5264        * <pre>
5265        * Result includes the Cells or else it just has a count of Cells
5266        * that are carried otherwise.
5267        * </pre>
5268        */
5269       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder(
5270           int index) {
5271         return getCellFieldBuilder().addBuilder(
5272             index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
5273       }
5274       /**
5275        * <code>repeated .Cell cell = 1;</code>
5276        *
5277        * <pre>
5278        * Result includes the Cells or else it just has a count of Cells
5279        * that are carried otherwise.
5280        * </pre>
5281        */
5282       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder> 
5283            getCellBuilderList() {
5284         return getCellFieldBuilder().getBuilderList();
5285       }
      // Lazily switches the repeated field to the RepeatedFieldBuilder
      // representation, seeding it with the current list (and its ownership
      // bit); cell_ is nulled afterwards so only one representation is live.
5286       private com.google.protobuf.RepeatedFieldBuilder<
5287           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> 
5288           getCellFieldBuilder() {
5289         if (cellBuilder_ == null) {
5290           cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5291               org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>(
5292                   cell_,
5293                   ((bitField0_ & 0x00000001) == 0x00000001),
5294                   getParentForChildren(),
5295                   isClean());
5296           cell_ = null;
5297         }
5298         return cellBuilder_;
5299       }
5300 
5301       // optional int32 associated_cell_count = 2;
5302       private int associatedCellCount_ ;
5303       /**
5304        * <code>optional int32 associated_cell_count = 2;</code>
5305        *
5306        * <pre>
5307        * The below count is set when the associated cells are
5308        * not part of this protobuf message; they are passed alongside
5309        * and then this Message is just a placeholder with metadata.
5310        * The count is needed to know how many to peel off the block of Cells as
5311        * ours.  NOTE: This is different from the pb managed cell_count of the
5312        * 'cell' field above which is non-null when the cells are pb'd.
5313        * </pre>
5314        */
5315       public boolean hasAssociatedCellCount() {
5316         return ((bitField0_ & 0x00000002) == 0x00000002);
5317       }
5318       /**
5319        * <code>optional int32 associated_cell_count = 2;</code>
5320        *
5321        * <pre>
5322        * The below count is set when the associated cells are
5323        * not part of this protobuf message; they are passed alongside
5324        * and then this Message is just a placeholder with metadata.
5325        * The count is needed to know how many to peel off the block of Cells as
5326        * ours.  NOTE: This is different from the pb managed cell_count of the
5327        * 'cell' field above which is non-null when the cells are pb'd.
5328        * </pre>
5329        */
5330       public int getAssociatedCellCount() {
5331         return associatedCellCount_;
5332       }
5333       /**
5334        * <code>optional int32 associated_cell_count = 2;</code>
5335        *
5336        * <pre>
5337        * The below count is set when the associated cells are
5338        * not part of this protobuf message; they are passed alongside
5339        * and then this Message is just a placeholder with metadata.
5340        * The count is needed to know how many to peel off the block of Cells as
5341        * ours.  NOTE: This is different from the pb managed cell_count of the
5342        * 'cell' field above which is non-null when the cells are pb'd.
5343        * </pre>
5344        */
5345       public Builder setAssociatedCellCount(int value) {
5346         bitField0_ |= 0x00000002;
5347         associatedCellCount_ = value;
5348         onChanged();
5349         return this;
5350       }
5351       /**
5352        * <code>optional int32 associated_cell_count = 2;</code>
5353        *
5354        * <pre>
5355        * The below count is set when the associated cells are
5356        * not part of this protobuf message; they are passed alongside
5357        * and then this Message is just a placeholder with metadata.
5358        * The count is needed to know how many to peel off the block of Cells as
5359        * ours.  NOTE: This is different from the pb managed cell_count of the
5360        * 'cell' field above which is non-null when the cells are pb'd.
5361        * </pre>
5362        */
5363       public Builder clearAssociatedCellCount() {
5364         bitField0_ = (bitField0_ & ~0x00000002);
5365         associatedCellCount_ = 0;
5366         onChanged();
5367         return this;
5368       }
5369 
5370       // optional bool exists = 3;
5371       private boolean exists_ ;
5372       /**
5373        * <code>optional bool exists = 3;</code>
5374        *
5375        * <pre>
5376        * used for Get to check existence only. Not set if existence_only was not set to true
5377        *  in the query.
5378        * </pre>
5379        */
5380       public boolean hasExists() {
5381         return ((bitField0_ & 0x00000004) == 0x00000004);
5382       }
5383       /**
5384        * <code>optional bool exists = 3;</code>
5385        *
5386        * <pre>
5387        * used for Get to check existence only. Not set if existence_only was not set to true
5388        *  in the query.
5389        * </pre>
5390        */
5391       public boolean getExists() {
5392         return exists_;
5393       }
5394       /**
5395        * <code>optional bool exists = 3;</code>
5396        *
5397        * <pre>
5398        * used for Get to check existence only. Not set if existence_only was not set to true
5399        *  in the query.
5400        * </pre>
5401        */
5402       public Builder setExists(boolean value) {
5403         bitField0_ |= 0x00000004;
5404         exists_ = value;
5405         onChanged();
5406         return this;
5407       }
5408       /**
5409        * <code>optional bool exists = 3;</code>
5410        *
5411        * <pre>
5412        * used for Get to check existence only. Not set if existence_only was not set to true
5413        *  in the query.
5414        * </pre>
5415        */
5416       public Builder clearExists() {
5417         bitField0_ = (bitField0_ & ~0x00000004);
5418         exists_ = false;
5419         onChanged();
5420         return this;
5421       }
5422 
5423       // optional bool stale = 4 [default = false];
5424       private boolean stale_ ;
5425       /**
5426        * <code>optional bool stale = 4 [default = false];</code>
5427        *
5428        * <pre>
5429        * Whether or not the results are coming from possibly stale data 
5430        * </pre>
5431        */
5432       public boolean hasStale() {
5433         return ((bitField0_ & 0x00000008) == 0x00000008);
5434       }
5435       /**
5436        * <code>optional bool stale = 4 [default = false];</code>
5437        *
5438        * <pre>
5439        * Whether or not the results are coming from possibly stale data 
5440        * </pre>
5441        */
5442       public boolean getStale() {
5443         return stale_;
5444       }
5445       /**
5446        * <code>optional bool stale = 4 [default = false];</code>
5447        *
5448        * <pre>
5449        * Whether or not the results are coming from possibly stale data 
5450        * </pre>
5451        */
5452       public Builder setStale(boolean value) {
5453         bitField0_ |= 0x00000008;
5454         stale_ = value;
5455         onChanged();
5456         return this;
5457       }
5458       /**
5459        * <code>optional bool stale = 4 [default = false];</code>
5460        *
5461        * <pre>
5462        * Whether or not the results are coming from possibly stale data 
5463        * </pre>
5464        */
5465       public Builder clearStale() {
5466         bitField0_ = (bitField0_ & ~0x00000008);
5467         stale_ = false;
5468         onChanged();
5469         return this;
5470       }
5471 
5472       // optional bool partial = 5 [default = false];
5473       private boolean partial_ ;
5474       /**
5475        * <code>optional bool partial = 5 [default = false];</code>
5476        *
5477        * <pre>
5478        * Whether or not the entire result could be returned. Results will be split when
5479        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5480        * cells for a row and must be combined with a result containing the remaining cells
5481        * to form a complete result
5482        * </pre>
5483        */
5484       public boolean hasPartial() {
5485         return ((bitField0_ & 0x00000010) == 0x00000010);
5486       }
5487       /**
5488        * <code>optional bool partial = 5 [default = false];</code>
5489        *
5490        * <pre>
5491        * Whether or not the entire result could be returned. Results will be split when
5492        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5493        * cells for a row and must be combined with a result containing the remaining cells
5494        * to form a complete result
5495        * </pre>
5496        */
5497       public boolean getPartial() {
5498         return partial_;
5499       }
5500       /**
5501        * <code>optional bool partial = 5 [default = false];</code>
5502        *
5503        * <pre>
5504        * Whether or not the entire result could be returned. Results will be split when
5505        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5506        * cells for a row and must be combined with a result containing the remaining cells
5507        * to form a complete result
5508        * </pre>
5509        */
5510       public Builder setPartial(boolean value) {
5511         bitField0_ |= 0x00000010;
5512         partial_ = value;
5513         onChanged();
5514         return this;
5515       }
5516       /**
5517        * <code>optional bool partial = 5 [default = false];</code>
5518        *
5519        * <pre>
5520        * Whether or not the entire result could be returned. Results will be split when
5521        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5522        * cells for a row and must be combined with a result containing the remaining cells
5523        * to form a complete result
5524        * </pre>
5525        */
5526       public Builder clearPartial() {
5527         bitField0_ = (bitField0_ & ~0x00000010);
5528         partial_ = false;
5529         onChanged();
5530         return this;
5531       }
5532 
5533       // @@protoc_insertion_point(builder_scope:Result)
5534     }
5535 
    // Builds the shared singleton returned by getDefaultInstance().
    // NOTE(review): the boolean-arg constructor is declared earlier in this
    // file (outside this view); presumably it skips normal field setup so
    // initFields() must be called explicitly here — confirm against the
    // Result(boolean) constructor.
5536     static {
5537       defaultInstance = new Result(true);
5538       defaultInstance.initFields();
5539     }
5540 
5541     // @@protoc_insertion_point(class_scope:Result)
5542   }
5543 
  // Read-only accessor interface for GetRequest, implemented by both the
  // immutable message and its Builder (standard protoc pattern). Both fields
  // are `required` at the protobuf level; has*() reports wire presence.
5544   public interface GetRequestOrBuilder
5545       extends com.google.protobuf.MessageOrBuilder {
5546 
5547     // required .RegionSpecifier region = 1;
5548     /**
5549      * <code>required .RegionSpecifier region = 1;</code>
5550      */
5551     boolean hasRegion();
5552     /**
5553      * <code>required .RegionSpecifier region = 1;</code>
5554      */
5555     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
5556     /**
5557      * <code>required .RegionSpecifier region = 1;</code>
5558      */
5559     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
5560 
5561     // required .Get get = 2;
5562     /**
5563      * <code>required .Get get = 2;</code>
5564      */
5565     boolean hasGet();
5566     /**
5567      * <code>required .Get get = 2;</code>
5568      */
5569     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
5570     /**
5571      * <code>required .Get get = 2;</code>
5572      */
5573     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
5574   }
  /**
   * Protobuf type {@code GetRequest}
   *
   * <pre>
   **
   * The get request. Perform a single Get operation.
   * </pre>
   *
   * Generated message with two required fields: field 1 ({@code region},
   * the target region) and field 2 ({@code get}, the query). Presence of
   * each field is tracked in {@code bitField0_} (bit 0 = region,
   * bit 1 = get). Do not hand-edit: regenerated from Client.proto.
   */
  public static final class GetRequest extends
      com.google.protobuf.GeneratedMessage
      implements GetRequestOrBuilder {
    // Use GetRequest.newBuilder() to construct.
    private GetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the singleton defaultInstance below.
    private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetRequest defaultInstance;
    public static GetRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via {@code PARSER}. Reads
     * tagged fields until tag 0 (end of stream/message); unrecognized
     * fields are preserved in {@code unknownFields}. On any parse failure
     * the partially built message is attached to the thrown exception.
     */
    private GetRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 10 = field 1 (region), wire type 2 (length-delimited).
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field seen before: merge the new value into the old one.
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            // tag 18 = field 2 (get), wire type 2 (length-delimited).
            case 18: {
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = get_.toBuilder();
              }
              get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(get_);
                get_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
    }

    // Stateless parser; all parseFrom(...) overloads below delegate to it.
    public static com.google.protobuf.Parser<GetRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetRequest>() {
      public GetRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetRequest> getParserForType() {
      return PARSER;
    }

    // Presence bits: 0x1 = region, 0x2 = get.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // required .Get get = 2;
    public static final int GET_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
    /**
     * <code>required .Get get = 2;</code>
     */
    public boolean hasGet() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .Get get = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
      return get_;
    }
    /**
     * <code>required .Get get = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
      return get_;
    }

    // Resets both fields to their default instances (never null).
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
    }
    // Cached isInitialized result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * A GetRequest is initialized only when both required fields are
     * present and themselves initialized. Result is memoized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGet()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getGet().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes only the fields whose presence bits are set, then any
    // unknown fields retained from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, get_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, get_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field equality: presence flags, field values, unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasGet() == other.hasGet());
      if (hasGet()) {
        result = result && getGet()
            .equals(other.getGet());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code; 0 means not yet computed.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasGet()) {
        hash = (37 * hash) + GET_FIELD_NUMBER;
        hash = (53 * hash) + getGet().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Convenience parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetRequest}
     *
     * <pre>
     **
     * The get request. Perform a single Get operation.
     * </pre>
     *
     * Mutable builder for {@code GetRequest}. For each message field it
     * holds either a plain value ({@code region_}/{@code get_}) or a
     * lazily created {@code SingleFieldBuilder}; once the field builder
     * exists it is the single source of truth for that field.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getGetFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (getBuilder_ == null) {
          get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without validating
      // required fields; presence bits are translated one by one.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (getBuilder_ == null) {
          result.get_ = get_;
        } else {
          result.get_ = getBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields that are set on |other|.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasGet()) {
          mergeGet(other.getGet());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!hasGet()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        if (!getGet().isInitialized()) {
          
          return false;
        }
        return true;
      }

      // Parses from a stream and merges into this builder. On failure the
      // partially parsed message is still merged in (finally block) so no
      // successfully read fields are lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Merges into the existing value when one is already set (and is not
       * the default instance); otherwise replaces it outright.
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Lazily creates the field builder; after creation, region_ is
       * nulled and the builder owns the field's state.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // required .Get get = 2;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
      /**
       * <code>required .Get get = 2;</code>
       */
      public boolean hasGet() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
        if (getBuilder_ == null) {
          return get_;
        } else {
          return getBuilder_.getMessage();
        }
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          get_ = value;
          onChanged();
        } else {
          getBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder setGet(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
        if (getBuilder_ == null) {
          get_ = builderForValue.build();
          onChanged();
        } else {
          getBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       *
       * Merges into the existing value when one is already set (and is not
       * the default instance); otherwise replaces it outright.
       */
      public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
            get_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
          } else {
            get_ = value;
          }
          onChanged();
        } else {
          getBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder clearGet() {
        if (getBuilder_ == null) {
          get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
          onChanged();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getGetFieldBuilder().getBuilder();
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
        if (getBuilder_ != null) {
          return getBuilder_.getMessageOrBuilder();
        } else {
          return get_;
        }
      }
      /**
       * <code>required .Get get = 2;</code>
       *
       * Lazily creates the field builder; after creation, get_ is nulled
       * and the builder owns the field's state.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> 
          getGetFieldBuilder() {
        if (getBuilder_ == null) {
          getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
                  get_,
                  getParentForChildren(),
                  isClean());
          get_ = null;
        }
        return getBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:GetRequest)
    }

    static {
      defaultInstance = new GetRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:GetRequest)
  }
6331 
  /**
   * Read accessor contract shared by {@link GetResponse} and its Builder:
   * presence check and getters for the single optional {@code result} field.
   */
  public interface GetResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .Result result = 1;
    /**
     * <code>optional .Result result = 1;</code>
     */
    boolean hasResult();
    /**
     * <code>optional .Result result = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
    /**
     * <code>optional .Result result = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
  }
  /**
   * Protobuf type {@code GetResponse}
   *
   * <p>Response message for a Get RPC; carries one optional {@code Result}.
   * Generated by the protocol buffer compiler from Client.proto — do not edit
   * by hand; regenerate from the .proto definition instead.
   */
  public static final class GetResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetResponseOrBuilder {
    // Use GetResponse.newBuilder() to construct.
    private GetResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton defaultInstance below.
    private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetResponse defaultInstance;
    public static GetResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not part of this message's
    // schema; kept so that reserialization does not lose data.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from {@code input}
     * until end-of-stream (tag 0). Unrecognized tags are routed into the
     * unknown-field set. Note the {@code default} arm textually precedes
     * {@code case 10}; this is legal Java and does not affect dispatch.
     */
    private GetResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 (result), wire type 2 (length-delimited)
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field already seen: merge the new occurrence into the old
                // one, per protobuf repeated-occurrence merge semantics.
                subBuilder = result_.toBuilder();
              }
              result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(result_);
                result_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was parsed — even on failure — so the partial
        // message attached to the thrown exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
    }

    // NOTE(review): PARSER is public and non-final (standard protobuf 2.x
    // generator output), so it is technically reassignable by callers.
    public static com.google.protobuf.Parser<GetResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetResponse>() {
      public GetResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetResponse> getParserForType() {
      return PARSER;
    }

    // Presence bitmap for optional fields; bit 0x1 tracks `result`.
    private int bitField0_;
    // optional .Result result = 1;
    public static final int RESULT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
    /**
     * <code>optional .Result result = 1;</code>
     */
    public boolean hasResult() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .Result result = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
      return result_;
    }
    /**
     * <code>optional .Result result = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
      return result_;
    }

    // Sets every field to its schema default (here: the Result default instance).
    private void initFields() {
      result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
    }
    // Memoized tri-state: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields in GetResponse, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensure the serialized size is computed (and cached) before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, result_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, result_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Field-wise equality: presence flags, field values, and unknown fields
     * must all match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj;

      boolean result = true;
      result = result && (hasResult() == other.hasResult());
      if (hasResult()) {
        result = result && getResult()
            .equals(other.getResult());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0
    // would be recomputed each call — harmless, just not cached).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasResult()) {
        hash = (37 * hash) + RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getResult().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // --- Static parse entry points; all delegate to PARSER. ---
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetResponse}
     *
     * <p>Mutable builder for {@link GetResponse}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requests it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getResultFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets the builder to default state and clears the presence bit.
      public Builder clear() {
        super.clear();
        if (resultBuilder_ == null) {
          result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
      }

      // Builds and verifies initialization (always true for this message).
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without initialization checks.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (resultBuilder_ == null) {
          result.result_ = result_;
        } else {
          result.result_ = resultBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this;
        if (other.hasResult()) {
          mergeResult(other.getResult());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream, merging whatever was read even if parsing
      // fails partway (parsedMessage is merged in the finally block).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .Result result = 1;
      // Invariant: while resultBuilder_ is null the plain message result_
      // holds the value; once the field builder is created it takes over.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
      /**
       * <code>optional .Result result = 1;</code>
       */
      public boolean hasResult() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
        if (resultBuilder_ == null) {
          return result_;
        } else {
          return resultBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          result_ = value;
          onChanged();
        } else {
          resultBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder setResult(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultBuilder_ == null) {
          result_ = builderForValue.build();
          onChanged();
        } else {
          resultBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       *
       * Merges {@code value} into the current field if one is already present
       * and non-default; otherwise replaces it outright.
       */
      public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
            result_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
          } else {
            result_ = value;
          }
          onChanged();
        } else {
          resultBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder clearResult() {
        if (resultBuilder_ == null) {
          result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
          onChanged();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResultFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
        if (resultBuilder_ != null) {
          return resultBuilder_.getMessageOrBuilder();
        } else {
          return result_;
        }
      }
      /**
       * <code>optional .Result result = 1;</code>
       *
       * Lazily creates the field builder; afterwards result_ is nulled and the
       * builder owns the field's state.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
          getResultFieldBuilder() {
        if (resultBuilder_ == null) {
          resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
                  result_,
                  getParentForChildren(),
                  isClean());
          result_ = null;
        }
        return resultBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:GetResponse)
    }

    // Eagerly builds the shared singleton default instance at class load time.
    static {
      defaultInstance = new GetResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:GetResponse)
  }
6876 
  /**
   * Read accessor contract shared by {@code Condition} and its Builder:
   * presence checks and getters for the five required fields (row, family,
   * qualifier, compare_type, comparator).
   */
  public interface ConditionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes row = 1;
    /**
     * <code>required bytes row = 1;</code>
     */
    boolean hasRow();
    /**
     * <code>required bytes row = 1;</code>
     */
    com.google.protobuf.ByteString getRow();

    // required bytes family = 2;
    /**
     * <code>required bytes family = 2;</code>
     */
    boolean hasFamily();
    /**
     * <code>required bytes family = 2;</code>
     */
    com.google.protobuf.ByteString getFamily();

    // required bytes qualifier = 3;
    /**
     * <code>required bytes qualifier = 3;</code>
     */
    boolean hasQualifier();
    /**
     * <code>required bytes qualifier = 3;</code>
     */
    com.google.protobuf.ByteString getQualifier();

    // required .CompareType compare_type = 4;
    /**
     * <code>required .CompareType compare_type = 4;</code>
     */
    boolean hasCompareType();
    /**
     * <code>required .CompareType compare_type = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType();

    // required .Comparator comparator = 5;
    /**
     * <code>required .Comparator comparator = 5;</code>
     */
    boolean hasComparator();
    /**
     * <code>required .Comparator comparator = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
    /**
     * <code>required .Comparator comparator = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
  }
6934   /**
6935    * Protobuf type {@code Condition}
6936    *
6937    * <pre>
6938    **
6939    * Condition to check if the value of a given cell (row,
6940    * family, qualifier) matches a value via a given comparator.
6941    *
6942    * Condition is used in check and mutate operations.
6943    * </pre>
6944    */
6945   public static final class Condition extends
6946       com.google.protobuf.GeneratedMessage
6947       implements ConditionOrBuilder {
    // Use Condition.newBuilder() to construct.
    private Condition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton defaultInstance.
    private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Condition defaultInstance;
    public static Condition getDefaultInstance() {
      return defaultInstance;
    }

    public Condition getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read off the wire that are not in this message's schema;
    // preserved so reserialization does not lose data.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until
     * end-of-stream (tag 0). Unknown tags — including unrecognized
     * CompareType enum values — are preserved in the unknown-field set.
     * The {@code default} arm textually precedes the field cases; this is
     * legal Java and does not affect switch dispatch.
     */
    private Condition(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 (row), length-delimited bytes
              bitField0_ |= 0x00000001;
              row_ = input.readBytes();
              break;
            }
            case 18: {  // field 2 (family), length-delimited bytes
              bitField0_ |= 0x00000002;
              family_ = input.readBytes();
              break;
            }
            case 26: {  // field 3 (qualifier), length-delimited bytes
              bitField0_ |= 0x00000004;
              qualifier_ = input.readBytes();
              break;
            }
            case 32: {  // field 4 (compare_type), varint enum
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
              if (value == null) {
                // Unknown enum ordinal: keep the raw varint so it round-trips.
                unknownFields.mergeVarintField(4, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                compareType_ = value;
              }
              break;
            }
            case 42: {  // field 5 (comparator), length-delimited message
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                // Field already seen: merge the new occurrence into the old.
                subBuilder = comparator_.toBuilder();
              }
              comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(comparator_);
                comparator_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze parsed state so the partial message attached to a
        // thrown exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
    }

    // NOTE(review): PARSER is public and non-final (standard protobuf 2.x
    // generator output), so it is technically reassignable by callers.
    public static com.google.protobuf.Parser<Condition> PARSER =
        new com.google.protobuf.AbstractParser<Condition>() {
      public Condition parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Condition(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Condition> getParserForType() {
      return PARSER;
    }
7070 
    // Presence bitmap: 0x1 row, 0x2 family, 0x4 qualifier, 0x8 compare_type,
    // 0x10 comparator.
    private int bitField0_;
    // required bytes row = 1;
    public static final int ROW_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString row_;
    /**
     * <code>required bytes row = 1;</code>
     */
    public boolean hasRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes row = 1;</code>
     */
    public com.google.protobuf.ByteString getRow() {
      return row_;
    }

    // required bytes family = 2;
    public static final int FAMILY_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString family_;
    /**
     * <code>required bytes family = 2;</code>
     */
    public boolean hasFamily() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes family = 2;</code>
     */
    public com.google.protobuf.ByteString getFamily() {
      return family_;
    }

    // required bytes qualifier = 3;
    public static final int QUALIFIER_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString qualifier_;
    /**
     * <code>required bytes qualifier = 3;</code>
     */
    public boolean hasQualifier() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required bytes qualifier = 3;</code>
     */
    public com.google.protobuf.ByteString getQualifier() {
      return qualifier_;
    }

    // required .CompareType compare_type = 4;
    public static final int COMPARE_TYPE_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_;
    /**
     * <code>required .CompareType compare_type = 4;</code>
     */
    public boolean hasCompareType() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required .CompareType compare_type = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
      return compareType_;
    }
7135 
7136     // required .Comparator comparator = 5;
7137     public static final int COMPARATOR_FIELD_NUMBER = 5;
7138     private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
7139     /**
7140      * <code>required .Comparator comparator = 5;</code>
7141      */
7142     public boolean hasComparator() {
7143       return ((bitField0_ & 0x00000010) == 0x00000010);
7144     }
7145     /**
7146      * <code>required .Comparator comparator = 5;</code>
7147      */
7148     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
7149       return comparator_;
7150     }
7151     /**
7152      * <code>required .Comparator comparator = 5;</code>
7153      */
7154     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
7155       return comparator_;
7156     }
7157 
    /**
     * Resets every field to its proto-declared default (empty byte strings,
     * the first enum value LESS, and the default Comparator instance).
     */
    private void initFields() {
      row_ = com.google.protobuf.ByteString.EMPTY;
      family_ = com.google.protobuf.ByteString.EMPTY;
      qualifier_ = com.google.protobuf.ByteString.EMPTY;
      compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
    }
7165     private byte memoizedIsInitialized = -1;
7166     public final boolean isInitialized() {
7167       byte isInitialized = memoizedIsInitialized;
7168       if (isInitialized != -1) return isInitialized == 1;
7169 
7170       if (!hasRow()) {
7171         memoizedIsInitialized = 0;
7172         return false;
7173       }
7174       if (!hasFamily()) {
7175         memoizedIsInitialized = 0;
7176         return false;
7177       }
7178       if (!hasQualifier()) {
7179         memoizedIsInitialized = 0;
7180         return false;
7181       }
7182       if (!hasCompareType()) {
7183         memoizedIsInitialized = 0;
7184         return false;
7185       }
7186       if (!hasComparator()) {
7187         memoizedIsInitialized = 0;
7188         return false;
7189       }
7190       if (!getComparator().isInitialized()) {
7191         memoizedIsInitialized = 0;
7192         return false;
7193       }
7194       memoizedIsInitialized = 1;
7195       return true;
7196     }
7197 
7198     public void writeTo(com.google.protobuf.CodedOutputStream output)
7199                         throws java.io.IOException {
7200       getSerializedSize();
7201       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7202         output.writeBytes(1, row_);
7203       }
7204       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7205         output.writeBytes(2, family_);
7206       }
7207       if (((bitField0_ & 0x00000004) == 0x00000004)) {
7208         output.writeBytes(3, qualifier_);
7209       }
7210       if (((bitField0_ & 0x00000008) == 0x00000008)) {
7211         output.writeEnum(4, compareType_.getNumber());
7212       }
7213       if (((bitField0_ & 0x00000010) == 0x00000010)) {
7214         output.writeMessage(5, comparator_);
7215       }
7216       getUnknownFields().writeTo(output);
7217     }
7218 
7219     private int memoizedSerializedSize = -1;
7220     public int getSerializedSize() {
7221       int size = memoizedSerializedSize;
7222       if (size != -1) return size;
7223 
7224       size = 0;
7225       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7226         size += com.google.protobuf.CodedOutputStream
7227           .computeBytesSize(1, row_);
7228       }
7229       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7230         size += com.google.protobuf.CodedOutputStream
7231           .computeBytesSize(2, family_);
7232       }
7233       if (((bitField0_ & 0x00000004) == 0x00000004)) {
7234         size += com.google.protobuf.CodedOutputStream
7235           .computeBytesSize(3, qualifier_);
7236       }
7237       if (((bitField0_ & 0x00000008) == 0x00000008)) {
7238         size += com.google.protobuf.CodedOutputStream
7239           .computeEnumSize(4, compareType_.getNumber());
7240       }
7241       if (((bitField0_ & 0x00000010) == 0x00000010)) {
7242         size += com.google.protobuf.CodedOutputStream
7243           .computeMessageSize(5, comparator_);
7244       }
7245       size += getUnknownFields().getSerializedSize();
7246       memoizedSerializedSize = size;
7247       return size;
7248     }
7249 
    private static final long serialVersionUID = 0L;
    /**
     * Java-serialization hook; delegates to the GeneratedMessage
     * serialized-form replacement.
     */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
7256 
7257     @java.lang.Override
7258     public boolean equals(final java.lang.Object obj) {
7259       if (obj == this) {
7260        return true;
7261       }
7262       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) {
7263         return super.equals(obj);
7264       }
7265       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj;
7266 
7267       boolean result = true;
7268       result = result && (hasRow() == other.hasRow());
7269       if (hasRow()) {
7270         result = result && getRow()
7271             .equals(other.getRow());
7272       }
7273       result = result && (hasFamily() == other.hasFamily());
7274       if (hasFamily()) {
7275         result = result && getFamily()
7276             .equals(other.getFamily());
7277       }
7278       result = result && (hasQualifier() == other.hasQualifier());
7279       if (hasQualifier()) {
7280         result = result && getQualifier()
7281             .equals(other.getQualifier());
7282       }
7283       result = result && (hasCompareType() == other.hasCompareType());
7284       if (hasCompareType()) {
7285         result = result &&
7286             (getCompareType() == other.getCompareType());
7287       }
7288       result = result && (hasComparator() == other.hasComparator());
7289       if (hasComparator()) {
7290         result = result && getComparator()
7291             .equals(other.getComparator());
7292       }
7293       result = result &&
7294           getUnknownFields().equals(other.getUnknownFields());
7295       return result;
7296     }
7297 
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with {@link #equals}: mixes the descriptor, each
     * present field (tagged by its field number), and the unknown fields.
     * The multiplier/field-number scheme must not be altered — it matches
     * every other generated protobuf message.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRow()) {
        hash = (37 * hash) + ROW_FIELD_NUMBER;
        hash = (53 * hash) + getRow().hashCode();
      }
      if (hasFamily()) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamily().hashCode();
      }
      if (hasQualifier()) {
        hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getQualifier().hashCode();
      }
      if (hasCompareType()) {
        hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getCompareType());
      }
      if (hasComparator()) {
        hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
        hash = (53 * hash) + getComparator().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7330 
    // Static parse entry points; all delegate to PARSER. The InputStream
    // variants throw java.io.IOException, the in-memory variants only
    // InvalidProtocolBufferException.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
7383 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    /** Internal hook used by parent builders to create nested builders. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code Condition}
     *
     * <pre>
     **
     * Condition to check if the value of a given cell (row,
     * family, qualifier) matches a value via a given comparator.
     *
     * Condition is used in check and mutate operations.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-message field builders when the runtime is
      // configured to always use field builders (descriptor mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getComparatorFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        row_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        family_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        qualifier_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (comparatorBuilder_ == null) {
          comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
        } else {
          comparatorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }

      public Builder clone() {
        // Deep copy via a partial build + merge into a fresh builder.
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if any required field is unset.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds the message without the required-field check, copying the
       * builder's presence bits into the message's bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.row_ = row_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.family_ = family_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.qualifier_ = qualifier_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.compareType_ = compareType_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        // Comparator may live either in the plain field or in a field builder.
        if (comparatorBuilder_ == null) {
          result.comparator_ = comparator_;
        } else {
          result.comparator_ = comparatorBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other);
        } else {
          // Fall back to reflective merge for other message types.
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges each field of {@code other} that is present; no-op for the default instance. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this;
        if (other.hasRow()) {
          setRow(other.getRow());
        }
        if (other.hasFamily()) {
          setFamily(other.getFamily());
        }
        if (other.hasQualifier()) {
          setQualifier(other.getQualifier());
        }
        if (other.hasCompareType()) {
          setCompareType(other.getCompareType());
        }
        if (other.hasComparator()) {
          // Sub-message is merged, not replaced.
          mergeComparator(other.getComparator());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRow()) {
          // required field row is unset
          return false;
        }
        if (!hasFamily()) {
          // required field family is unset
          return false;
        }
        if (!hasQualifier()) {
          // required field qualifier is unset
          return false;
        }
        if (!hasCompareType()) {
          // required field compare_type is unset
          return false;
        }
        if (!hasComparator()) {
          // required field comparator is unset
          return false;
        }
        if (!getComparator().isInitialized()) {
          // nested comparator message is itself uninitialized
          return false;
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On a parse
       * failure the partially-parsed message (if any) is still merged
       * before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits; same layout as the message's bitField0_.
      private int bitField0_;

      // required bytes row = 1;
      private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes row = 1;</code>
       */
      public boolean hasRow() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes row = 1;</code>
       */
      public com.google.protobuf.ByteString getRow() {
        return row_;
      }
      /**
       * <code>required bytes row = 1;</code>
       */
      public Builder setRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        row_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes row = 1;</code>
       */
      public Builder clearRow() {
        bitField0_ = (bitField0_ & ~0x00000001);
        row_ = getDefaultInstance().getRow();
        onChanged();
        return this;
      }

      // required bytes family = 2;
      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family = 2;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes family = 2;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }
      /**
       * <code>required bytes family = 2;</code>
       */
      public Builder setFamily(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        family_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes family = 2;</code>
       */
      public Builder clearFamily() {
        bitField0_ = (bitField0_ & ~0x00000002);
        family_ = getDefaultInstance().getFamily();
        onChanged();
        return this;
      }

      // required bytes qualifier = 3;
      private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes qualifier = 3;</code>
       */
      public boolean hasQualifier() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required bytes qualifier = 3;</code>
       */
      public com.google.protobuf.ByteString getQualifier() {
        return qualifier_;
      }
      /**
       * <code>required bytes qualifier = 3;</code>
       */
      public Builder setQualifier(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        qualifier_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes qualifier = 3;</code>
       */
      public Builder clearQualifier() {
        bitField0_ = (bitField0_ & ~0x00000004);
        qualifier_ = getDefaultInstance().getQualifier();
        onChanged();
        return this;
      }

      // required .CompareType compare_type = 4;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      /**
       * <code>required .CompareType compare_type = 4;</code>
       */
      public boolean hasCompareType() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>required .CompareType compare_type = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
        return compareType_;
      }
      /**
       * <code>required .CompareType compare_type = 4;</code>
       */
      public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        compareType_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .CompareType compare_type = 4;</code>
       */
      public Builder clearCompareType() {
        bitField0_ = (bitField0_ & ~0x00000008);
        compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
        onChanged();
        return this;
      }

      // required .Comparator comparator = 5;
      // The comparator lives in comparator_ until getComparatorFieldBuilder()
      // is first called, after which comparatorBuilder_ owns it.
      private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public boolean hasComparator() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
        if (comparatorBuilder_ == null) {
          return comparator_;
        } else {
          return comparatorBuilder_.getMessage();
        }
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
        if (comparatorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          comparator_ = value;
          onChanged();
        } else {
          comparatorBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public Builder setComparator(
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
        if (comparatorBuilder_ == null) {
          comparator_ = builderForValue.build();
          onChanged();
        } else {
          comparatorBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
        if (comparatorBuilder_ == null) {
          // Merge into the existing comparator only when one is already set
          // and it is not the shared default instance; otherwise replace.
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
            comparator_ =
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
          } else {
            comparator_ = value;
          }
          onChanged();
        } else {
          comparatorBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public Builder clearComparator() {
        if (comparatorBuilder_ == null) {
          comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
          onChanged();
        } else {
          comparatorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
        // Returning a builder implies the field will be mutated; mark it set.
        bitField0_ |= 0x00000010;
        onChanged();
        return getComparatorFieldBuilder().getBuilder();
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
        if (comparatorBuilder_ != null) {
          return comparatorBuilder_.getMessageOrBuilder();
        } else {
          return comparator_;
        }
      }
      /**
       * <code>required .Comparator comparator = 5;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
          getComparatorFieldBuilder() {
        if (comparatorBuilder_ == null) {
          // Lazily swap the plain field for a SingleFieldBuilder; from this
          // point on, comparatorBuilder_ is the single source of truth.
          comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
                  comparator_,
                  getParentForChildren(),
                  isClean());
          comparator_ = null;
        }
        return comparatorBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:Condition)
    }
7855 
    // Eagerly create the shared immutable default instance for Condition and
    // apply the proto-declared field defaults via initFields().
    static {
      defaultInstance = new Condition(true);
      defaultInstance.initFields();
    }
7860 
7861     // @@protoc_insertion_point(class_scope:Condition)
7862   }
7863 
  /**
   * Read-side accessor contract for {@code MutationProto}, implemented by both
   * the immutable message and its builder.  Generated by protoc; do not edit
   * by hand.
   */
  public interface MutationProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes row = 1;
    /**
     * <code>optional bytes row = 1;</code>
     */
    boolean hasRow();
    /**
     * <code>optional bytes row = 1;</code>
     */
    com.google.protobuf.ByteString getRow();

    // optional .MutationProto.MutationType mutate_type = 2;
    /**
     * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
     */
    boolean hasMutateType();
    /**
     * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType();

    // repeated .MutationProto.ColumnValue column_value = 3;
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> 
        getColumnValueList();
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index);
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    int getColumnValueCount();
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
        getColumnValueOrBuilderList();
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
        int index);

    // optional uint64 timestamp = 4;
    /**
     * <code>optional uint64 timestamp = 4;</code>
     */
    boolean hasTimestamp();
    /**
     * <code>optional uint64 timestamp = 4;</code>
     */
    long getTimestamp();

    // repeated .NameBytesPair attribute = 5;
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> 
        getAttributeList();
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    int getAttributeCount();
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
        getAttributeOrBuilderList();
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index);

    // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];
    /**
     * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
     */
    boolean hasDurability();
    /**
     * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability();

    // optional .TimeRange time_range = 7;
    /**
     * <code>optional .TimeRange time_range = 7;</code>
     *
     * <pre>
     * For some mutations, a result may be returned, in which case,
     * time range can be specified for potential performance gain
     * </pre>
     */
    boolean hasTimeRange();
    /**
     * <code>optional .TimeRange time_range = 7;</code>
     *
     * <pre>
     * For some mutations, a result may be returned, in which case,
     * time range can be specified for potential performance gain
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
    /**
     * <code>optional .TimeRange time_range = 7;</code>
     *
     * <pre>
     * For some mutations, a result may be returned, in which case,
     * time range can be specified for potential performance gain
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();

    // optional int32 associated_cell_count = 8;
    /**
     * <code>optional int32 associated_cell_count = 8;</code>
     *
     * <pre>
     * The below count is set when the associated cells are NOT
     * part of this protobuf message; they are passed alongside
     * and then this Message is a placeholder with metadata.  The
     * count is needed to know how many to peel off the block of Cells as
     * ours.  NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    boolean hasAssociatedCellCount();
    /**
     * <code>optional int32 associated_cell_count = 8;</code>
     *
     * <pre>
     * The below count is set when the associated cells are NOT
     * part of this protobuf message; they are passed alongside
     * and then this Message is a placeholder with metadata.  The
     * count is needed to know how many to peel off the block of Cells as
     * ours.  NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    int getAssociatedCellCount();

    // optional uint64 nonce = 9;
    /**
     * <code>optional uint64 nonce = 9;</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 9;</code>
     */
    long getNonce();
  }
8024   /**
8025    * Protobuf type {@code MutationProto}
8026    *
8027    * <pre>
8028    **
8029    * A specific mutation inside a mutate request.
8030    * It can be an append, increment, put or delete based
8031    * on the mutation type.  It can be fully filled in or
8032    * only metadata present because data is being carried
8033    * elsewhere outside of pb.
8034    * </pre>
8035    */
8036   public static final class MutationProto extends
8037       com.google.protobuf.GeneratedMessage
8038       implements MutationProtoOrBuilder {
    // Use MutationProto.newBuilder() to construct.
    private MutationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used for the shared default instance; only installs
    // an empty UnknownFieldSet (field defaults are applied via initFields()
    // in the class's static initializer, not shown in this excerpt).
    private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; assigned in the class's static initializer.
    private static final MutationProto defaultInstance;
    public static MutationProto getDefaultInstance() {
      return defaultInstance;
    }

    public MutationProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields encountered on the wire that this schema version does not know;
    // preserved so round-tripping the message does not drop data.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: decodes one MutationProto from the
    // stream.  Each case label is a full protobuf tag value,
    // (field_number << 3) | wire_type — e.g. 10 = field 1 length-delimited,
    // 16 = field 2 varint.  Note protoc emits the default arm before the
    // numbered cases; Java switch dispatch is unaffected by case order.
    private MutationProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // bitField0_ tracks presence of singular fields; mutable_bitField0_
      // separately tracks which repeated-field lists have been allocated.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 = end of stream / end of group.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              row_ = input.readBytes();
              break;
            }
            case 16: {
              // Unrecognized enum numbers are kept as unknown varint fields
              // rather than discarded (forward compatibility).
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(2, rawValue);
              } else {
                bitField0_ |= 0x00000002;
                mutateType_ = value;
              }
              break;
            }
            case 26: {
              // Lazily allocate the column_value list on first element.
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>();
                mutable_bitField0_ |= 0x00000004;
              }
              columnValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry));
              break;
            }
            case 32: {
              bitField0_ |= 0x00000004;
              timestamp_ = input.readUInt64();
              break;
            }
            case 42: {
              // Lazily allocate the attribute list on first element.
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
                mutable_bitField0_ |= 0x00000010;
              }
              attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
              break;
            }
            case 48: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(6, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                durability_ = value;
              }
              break;
            }
            case 58: {
              // If time_range was already set, merge the new occurrence into
              // it (last-one-wins per-subfield, standard proto2 semantics).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = timeRange_.toBuilder();
              }
              timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timeRange_);
                timeRange_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 64: {
              bitField0_ |= 0x00000020;
              associatedCellCount_ = input.readInt32();
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              nonce_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even when parsing fails partway: freeze the repeated-field
        // lists and unknown fields so the partially-built message attached to
        // the exception is still immutable.
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
        }
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          attribute_ = java.util.Collections.unmodifiableList(attribute_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, looked up from the file-level
    // descriptor table held by the enclosing ClientProtos class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor;
    }

    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
    }

    // Stateless parser singleton; delegates to the stream-parsing constructor.
    public static com.google.protobuf.Parser<MutationProto> PARSER =
        new com.google.protobuf.AbstractParser<MutationProto>() {
      public MutationProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MutationProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MutationProto> getParserForType() {
      return PARSER;
    }
8199 
8200     /**
8201      * Protobuf enum {@code MutationProto.Durability}
8202      */
8203     public enum Durability
8204         implements com.google.protobuf.ProtocolMessageEnum {
8205       /**
8206        * <code>USE_DEFAULT = 0;</code>
8207        */
8208       USE_DEFAULT(0, 0),
8209       /**
8210        * <code>SKIP_WAL = 1;</code>
8211        */
8212       SKIP_WAL(1, 1),
8213       /**
8214        * <code>ASYNC_WAL = 2;</code>
8215        */
8216       ASYNC_WAL(2, 2),
8217       /**
8218        * <code>SYNC_WAL = 3;</code>
8219        */
8220       SYNC_WAL(3, 3),
8221       /**
8222        * <code>FSYNC_WAL = 4;</code>
8223        */
8224       FSYNC_WAL(4, 4),
8225       ;
8226 
8227       /**
8228        * <code>USE_DEFAULT = 0;</code>
8229        */
8230       public static final int USE_DEFAULT_VALUE = 0;
8231       /**
8232        * <code>SKIP_WAL = 1;</code>
8233        */
8234       public static final int SKIP_WAL_VALUE = 1;
8235       /**
8236        * <code>ASYNC_WAL = 2;</code>
8237        */
8238       public static final int ASYNC_WAL_VALUE = 2;
8239       /**
8240        * <code>SYNC_WAL = 3;</code>
8241        */
8242       public static final int SYNC_WAL_VALUE = 3;
8243       /**
8244        * <code>FSYNC_WAL = 4;</code>
8245        */
8246       public static final int FSYNC_WAL_VALUE = 4;
8247 
8248 
8249       public final int getNumber() { return value; }
8250 
8251       public static Durability valueOf(int value) {
8252         switch (value) {
8253           case 0: return USE_DEFAULT;
8254           case 1: return SKIP_WAL;
8255           case 2: return ASYNC_WAL;
8256           case 3: return SYNC_WAL;
8257           case 4: return FSYNC_WAL;
8258           default: return null;
8259         }
8260       }
8261 
8262       public static com.google.protobuf.Internal.EnumLiteMap<Durability>
8263           internalGetValueMap() {
8264         return internalValueMap;
8265       }
8266       private static com.google.protobuf.Internal.EnumLiteMap<Durability>
8267           internalValueMap =
8268             new com.google.protobuf.Internal.EnumLiteMap<Durability>() {
8269               public Durability findValueByNumber(int number) {
8270                 return Durability.valueOf(number);
8271               }
8272             };
8273 
8274       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8275           getValueDescriptor() {
8276         return getDescriptor().getValues().get(index);
8277       }
8278       public final com.google.protobuf.Descriptors.EnumDescriptor
8279           getDescriptorForType() {
8280         return getDescriptor();
8281       }
8282       public static final com.google.protobuf.Descriptors.EnumDescriptor
8283           getDescriptor() {
8284         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0);
8285       }
8286 
8287       private static final Durability[] VALUES = values();
8288 
8289       public static Durability valueOf(
8290           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8291         if (desc.getType() != getDescriptor()) {
8292           throw new java.lang.IllegalArgumentException(
8293             "EnumValueDescriptor is not for this type.");
8294         }
8295         return VALUES[desc.getIndex()];
8296       }
8297 
8298       private final int index;
8299       private final int value;
8300 
8301       private Durability(int index, int value) {
8302         this.index = index;
8303         this.value = value;
8304       }
8305 
8306       // @@protoc_insertion_point(enum_scope:MutationProto.Durability)
8307     }
8308 
8309     /**
8310      * Protobuf enum {@code MutationProto.MutationType}
8311      */
8312     public enum MutationType
8313         implements com.google.protobuf.ProtocolMessageEnum {
8314       /**
8315        * <code>APPEND = 0;</code>
8316        */
8317       APPEND(0, 0),
8318       /**
8319        * <code>INCREMENT = 1;</code>
8320        */
8321       INCREMENT(1, 1),
8322       /**
8323        * <code>PUT = 2;</code>
8324        */
8325       PUT(2, 2),
8326       /**
8327        * <code>DELETE = 3;</code>
8328        */
8329       DELETE(3, 3),
8330       ;
8331 
8332       /**
8333        * <code>APPEND = 0;</code>
8334        */
8335       public static final int APPEND_VALUE = 0;
8336       /**
8337        * <code>INCREMENT = 1;</code>
8338        */
8339       public static final int INCREMENT_VALUE = 1;
8340       /**
8341        * <code>PUT = 2;</code>
8342        */
8343       public static final int PUT_VALUE = 2;
8344       /**
8345        * <code>DELETE = 3;</code>
8346        */
8347       public static final int DELETE_VALUE = 3;
8348 
8349 
8350       public final int getNumber() { return value; }
8351 
8352       public static MutationType valueOf(int value) {
8353         switch (value) {
8354           case 0: return APPEND;
8355           case 1: return INCREMENT;
8356           case 2: return PUT;
8357           case 3: return DELETE;
8358           default: return null;
8359         }
8360       }
8361 
8362       public static com.google.protobuf.Internal.EnumLiteMap<MutationType>
8363           internalGetValueMap() {
8364         return internalValueMap;
8365       }
8366       private static com.google.protobuf.Internal.EnumLiteMap<MutationType>
8367           internalValueMap =
8368             new com.google.protobuf.Internal.EnumLiteMap<MutationType>() {
8369               public MutationType findValueByNumber(int number) {
8370                 return MutationType.valueOf(number);
8371               }
8372             };
8373 
8374       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8375           getValueDescriptor() {
8376         return getDescriptor().getValues().get(index);
8377       }
8378       public final com.google.protobuf.Descriptors.EnumDescriptor
8379           getDescriptorForType() {
8380         return getDescriptor();
8381       }
8382       public static final com.google.protobuf.Descriptors.EnumDescriptor
8383           getDescriptor() {
8384         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1);
8385       }
8386 
8387       private static final MutationType[] VALUES = values();
8388 
8389       public static MutationType valueOf(
8390           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8391         if (desc.getType() != getDescriptor()) {
8392           throw new java.lang.IllegalArgumentException(
8393             "EnumValueDescriptor is not for this type.");
8394         }
8395         return VALUES[desc.getIndex()];
8396       }
8397 
8398       private final int index;
8399       private final int value;
8400 
8401       private MutationType(int index, int value) {
8402         this.index = index;
8403         this.value = value;
8404       }
8405 
8406       // @@protoc_insertion_point(enum_scope:MutationProto.MutationType)
8407     }
8408 
8409     /**
8410      * Protobuf enum {@code MutationProto.DeleteType}
8411      */
8412     public enum DeleteType
8413         implements com.google.protobuf.ProtocolMessageEnum {
8414       /**
8415        * <code>DELETE_ONE_VERSION = 0;</code>
8416        */
8417       DELETE_ONE_VERSION(0, 0),
8418       /**
8419        * <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
8420        */
8421       DELETE_MULTIPLE_VERSIONS(1, 1),
8422       /**
8423        * <code>DELETE_FAMILY = 2;</code>
8424        */
8425       DELETE_FAMILY(2, 2),
8426       /**
8427        * <code>DELETE_FAMILY_VERSION = 3;</code>
8428        */
8429       DELETE_FAMILY_VERSION(3, 3),
8430       ;
8431 
8432       /**
8433        * <code>DELETE_ONE_VERSION = 0;</code>
8434        */
8435       public static final int DELETE_ONE_VERSION_VALUE = 0;
8436       /**
8437        * <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
8438        */
8439       public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1;
8440       /**
8441        * <code>DELETE_FAMILY = 2;</code>
8442        */
8443       public static final int DELETE_FAMILY_VALUE = 2;
8444       /**
8445        * <code>DELETE_FAMILY_VERSION = 3;</code>
8446        */
8447       public static final int DELETE_FAMILY_VERSION_VALUE = 3;
8448 
8449 
8450       public final int getNumber() { return value; }
8451 
8452       public static DeleteType valueOf(int value) {
8453         switch (value) {
8454           case 0: return DELETE_ONE_VERSION;
8455           case 1: return DELETE_MULTIPLE_VERSIONS;
8456           case 2: return DELETE_FAMILY;
8457           case 3: return DELETE_FAMILY_VERSION;
8458           default: return null;
8459         }
8460       }
8461 
8462       public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
8463           internalGetValueMap() {
8464         return internalValueMap;
8465       }
8466       private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
8467           internalValueMap =
8468             new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
8469               public DeleteType findValueByNumber(int number) {
8470                 return DeleteType.valueOf(number);
8471               }
8472             };
8473 
8474       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8475           getValueDescriptor() {
8476         return getDescriptor().getValues().get(index);
8477       }
8478       public final com.google.protobuf.Descriptors.EnumDescriptor
8479           getDescriptorForType() {
8480         return getDescriptor();
8481       }
8482       public static final com.google.protobuf.Descriptors.EnumDescriptor
8483           getDescriptor() {
8484         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(2);
8485       }
8486 
8487       private static final DeleteType[] VALUES = values();
8488 
8489       public static DeleteType valueOf(
8490           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8491         if (desc.getType() != getDescriptor()) {
8492           throw new java.lang.IllegalArgumentException(
8493             "EnumValueDescriptor is not for this type.");
8494         }
8495         return VALUES[desc.getIndex()];
8496       }
8497 
8498       private final int index;
8499       private final int value;
8500 
8501       private DeleteType(int index, int value) {
8502         this.index = index;
8503         this.value = value;
8504       }
8505 
8506       // @@protoc_insertion_point(enum_scope:MutationProto.DeleteType)
8507     }
8508 
    /**
     * Read-side accessor contract for {@code MutationProto.ColumnValue},
     * implemented by both the immutable message and its builder.
     * Generated by protoc; do not edit by hand.
     */
    public interface ColumnValueOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required bytes family = 1;
      /**
       * <code>required bytes family = 1;</code>
       */
      boolean hasFamily();
      /**
       * <code>required bytes family = 1;</code>
       */
      com.google.protobuf.ByteString getFamily();

      // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> 
          getQualifierValueList();
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index);
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      int getQualifierValueCount();
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
          getQualifierValueOrBuilderList();
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
          int index);
    }
8547     /**
8548      * Protobuf type {@code MutationProto.ColumnValue}
8549      */
8550     public static final class ColumnValue extends
8551         com.google.protobuf.GeneratedMessage
8552         implements ColumnValueOrBuilder {
      // Use ColumnValue.newBuilder() to construct.
      private ColumnValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // noInit constructor: used for the shared default instance; only
      // installs an empty UnknownFieldSet.
      private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      // Singleton default instance; assigned in the class's static initializer.
      private static final ColumnValue defaultInstance;
      public static ColumnValue getDefaultInstance() {
        return defaultInstance;
      }

      public ColumnValue getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Unrecognized wire fields, preserved for round-tripping.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor for ColumnValue.  Case labels are
      // full protobuf tags: 10 = field 1 (family, bytes), 18 = field 2
      // (qualifier_value, length-delimited message).  protoc emits the
      // default arm first; Java switch dispatch is unaffected by case order.
      private ColumnValue(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        // mutable_bitField0_ tracks which repeated-field lists were allocated.
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                // Tag 0 = end of stream / end of group.
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
              case 10: {
                bitField0_ |= 0x00000001;
                family_ = input.readBytes();
                break;
              }
              case 18: {
                // Lazily allocate the qualifier_value list on first element.
                if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                  qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>();
                  mutable_bitField0_ |= 0x00000002;
                }
                qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry));
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Runs even on parse failure: freeze the list and unknown fields so
          // the partial message attached to the exception is immutable.
          if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
            qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
          }
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor for this nested message type, from the file-level tables.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor;
      }

      // Reflection support: maps descriptor fields to generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
      }

      // Stateless parser singleton; delegates to the parsing constructor.
      public static com.google.protobuf.Parser<ColumnValue> PARSER =
          new com.google.protobuf.AbstractParser<ColumnValue>() {
        public ColumnValue parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ColumnValue(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<ColumnValue> getParserForType() {
        return PARSER;
      }
8652 
      /**
       * Read-only accessor contract for {@code MutationProto.ColumnValue.QualifierValue},
       * implemented by both the immutable message and its builder. Each optional field
       * exposes a {@code hasX()} presence check paired with a {@code getX()} accessor;
       * when {@code hasX()} is false, {@code getX()} returns the field's default value.
       */
      public interface QualifierValueOrBuilder
          extends com.google.protobuf.MessageOrBuilder {

        // optional bytes qualifier = 1;
        /**
         * <code>optional bytes qualifier = 1;</code>
         */
        boolean hasQualifier();
        /**
         * <code>optional bytes qualifier = 1;</code>
         */
        com.google.protobuf.ByteString getQualifier();

        // optional bytes value = 2;
        /**
         * <code>optional bytes value = 2;</code>
         */
        boolean hasValue();
        /**
         * <code>optional bytes value = 2;</code>
         */
        com.google.protobuf.ByteString getValue();

        // optional uint64 timestamp = 3;
        /**
         * <code>optional uint64 timestamp = 3;</code>
         */
        boolean hasTimestamp();
        /**
         * <code>optional uint64 timestamp = 3;</code>
         */
        long getTimestamp();

        // optional .MutationProto.DeleteType delete_type = 4;
        /**
         * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
         */
        boolean hasDeleteType();
        /**
         * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
         */
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType();

        // optional bytes tags = 5;
        /**
         * <code>optional bytes tags = 5;</code>
         */
        boolean hasTags();
        /**
         * <code>optional bytes tags = 5;</code>
         */
        com.google.protobuf.ByteString getTags();
      }
8706       /**
8707        * Protobuf type {@code MutationProto.ColumnValue.QualifierValue}
8708        */
8709       public static final class QualifierValue extends
8710           com.google.protobuf.GeneratedMessage
8711           implements QualifierValueOrBuilder {
        // Use QualifierValue.newBuilder() to construct.
        private QualifierValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
          super(builder);
          this.unknownFields = builder.getUnknownFields();
        }
        // Constructor for the singleton default instance; all fields keep their defaults.
        private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

        // Shared immutable instance representing a message with no fields set.
        private static final QualifierValue defaultInstance;
        /** Returns the shared default (all-fields-unset) instance. */
        public static QualifierValue getDefaultInstance() {
          return defaultInstance;
        }

        public QualifierValue getDefaultInstanceForType() {
          return defaultInstance;
        }

        // Fields received on the wire that this generated class does not recognize.
        private final com.google.protobuf.UnknownFieldSet unknownFields;
        @java.lang.Override
        public final com.google.protobuf.UnknownFieldSet
            getUnknownFields() {
          return this.unknownFields;
        }
        /**
         * Parses a QualifierValue directly from the protobuf wire format.
         * Recognized tags populate the corresponding field and presence bit;
         * everything else is preserved in {@code unknownFields}. Partial data
         * parsed before an error is attached to the thrown exception via
         * {@code setUnfinishedMessage}.
         */
        private QualifierValue(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          initFields();
          int mutable_bitField0_ = 0;
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
              com.google.protobuf.UnknownFieldSet.newBuilder();
          try {
            boolean done = false;
            while (!done) {
              int tag = input.readTag();
              switch (tag) {
                case 0:
                  // Tag 0 marks end of the input stream.
                  done = true;
                  break;
                default: {
                  // Unrecognized tag: stash it in unknownFields; stop if it
                  // cannot be skipped (e.g. end-group tag).
                  if (!parseUnknownField(input, unknownFields,
                                         extensionRegistry, tag)) {
                    done = true;
                  }
                  break;
                }
                case 10: {
                  // Field 1 (qualifier), wire type 2: length-delimited bytes.
                  bitField0_ |= 0x00000001;
                  qualifier_ = input.readBytes();
                  break;
                }
                case 18: {
                  // Field 2 (value), wire type 2: length-delimited bytes.
                  bitField0_ |= 0x00000002;
                  value_ = input.readBytes();
                  break;
                }
                case 24: {
                  // Field 3 (timestamp), wire type 0: varint.
                  bitField0_ |= 0x00000004;
                  timestamp_ = input.readUInt64();
                  break;
                }
                case 32: {
                  // Field 4 (delete_type), wire type 0: enum varint. Values not
                  // in the known enum range are kept as unknown fields.
                  int rawValue = input.readEnum();
                  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue);
                  if (value == null) {
                    unknownFields.mergeVarintField(4, rawValue);
                  } else {
                    bitField0_ |= 0x00000008;
                    deleteType_ = value;
                  }
                  break;
                }
                case 42: {
                  // Field 5 (tags), wire type 2: length-delimited bytes.
                  bitField0_ |= 0x00000010;
                  tags_ = input.readBytes();
                  break;
                }
              }
            }
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(this);
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(
                e.getMessage()).setUnfinishedMessage(this);
          } finally {
            // Runs on both success and failure so the partially parsed
            // message is always internally consistent.
            this.unknownFields = unknownFields.build();
            makeExtensionsImmutable();
          }
        }
        /** Returns the protobuf descriptor for {@code MutationProto.ColumnValue.QualifierValue}. */
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
        }

        /** Binds the reflective field accessors to the generated message/builder classes. */
        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
        }

        // Stateless parser: delegates to the wire-format stream constructor above.
        public static com.google.protobuf.Parser<QualifierValue> PARSER =
            new com.google.protobuf.AbstractParser<QualifierValue>() {
          public QualifierValue parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            return new QualifierValue(input, extensionRegistry);
          }
        };

        /** Returns the shared parser instance for {@code QualifierValue} messages. */
        @java.lang.Override
        public com.google.protobuf.Parser<QualifierValue> getParserForType() {
          return PARSER;
        }
8826 
8827         private int bitField0_;
8828         // optional bytes qualifier = 1;
8829         public static final int QUALIFIER_FIELD_NUMBER = 1;
8830         private com.google.protobuf.ByteString qualifier_;
8831         /**
8832          * <code>optional bytes qualifier = 1;</code>
8833          */
8834         public boolean hasQualifier() {
8835           return ((bitField0_ & 0x00000001) == 0x00000001);
8836         }
8837         /**
8838          * <code>optional bytes qualifier = 1;</code>
8839          */
8840         public com.google.protobuf.ByteString getQualifier() {
8841           return qualifier_;
8842         }
8843 
8844         // optional bytes value = 2;
8845         public static final int VALUE_FIELD_NUMBER = 2;
8846         private com.google.protobuf.ByteString value_;
8847         /**
8848          * <code>optional bytes value = 2;</code>
8849          */
8850         public boolean hasValue() {
8851           return ((bitField0_ & 0x00000002) == 0x00000002);
8852         }
8853         /**
8854          * <code>optional bytes value = 2;</code>
8855          */
8856         public com.google.protobuf.ByteString getValue() {
8857           return value_;
8858         }
8859 
8860         // optional uint64 timestamp = 3;
8861         public static final int TIMESTAMP_FIELD_NUMBER = 3;
8862         private long timestamp_;
8863         /**
8864          * <code>optional uint64 timestamp = 3;</code>
8865          */
8866         public boolean hasTimestamp() {
8867           return ((bitField0_ & 0x00000004) == 0x00000004);
8868         }
8869         /**
8870          * <code>optional uint64 timestamp = 3;</code>
8871          */
8872         public long getTimestamp() {
8873           return timestamp_;
8874         }
8875 
8876         // optional .MutationProto.DeleteType delete_type = 4;
8877         public static final int DELETE_TYPE_FIELD_NUMBER = 4;
8878         private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_;
8879         /**
8880          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
8881          */
8882         public boolean hasDeleteType() {
8883           return ((bitField0_ & 0x00000008) == 0x00000008);
8884         }
8885         /**
8886          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
8887          */
8888         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
8889           return deleteType_;
8890         }
8891 
8892         // optional bytes tags = 5;
8893         public static final int TAGS_FIELD_NUMBER = 5;
8894         private com.google.protobuf.ByteString tags_;
8895         /**
8896          * <code>optional bytes tags = 5;</code>
8897          */
8898         public boolean hasTags() {
8899           return ((bitField0_ & 0x00000010) == 0x00000010);
8900         }
8901         /**
8902          * <code>optional bytes tags = 5;</code>
8903          */
8904         public com.google.protobuf.ByteString getTags() {
8905           return tags_;
8906         }
8907 
8908         private void initFields() {
8909           qualifier_ = com.google.protobuf.ByteString.EMPTY;
8910           value_ = com.google.protobuf.ByteString.EMPTY;
8911           timestamp_ = 0L;
8912           deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
8913           tags_ = com.google.protobuf.ByteString.EMPTY;
8914         }
8915         private byte memoizedIsInitialized = -1;
8916         public final boolean isInitialized() {
8917           byte isInitialized = memoizedIsInitialized;
8918           if (isInitialized != -1) return isInitialized == 1;
8919 
8920           memoizedIsInitialized = 1;
8921           return true;
8922         }
8923 
8924         public void writeTo(com.google.protobuf.CodedOutputStream output)
8925                             throws java.io.IOException {
8926           getSerializedSize();
8927           if (((bitField0_ & 0x00000001) == 0x00000001)) {
8928             output.writeBytes(1, qualifier_);
8929           }
8930           if (((bitField0_ & 0x00000002) == 0x00000002)) {
8931             output.writeBytes(2, value_);
8932           }
8933           if (((bitField0_ & 0x00000004) == 0x00000004)) {
8934             output.writeUInt64(3, timestamp_);
8935           }
8936           if (((bitField0_ & 0x00000008) == 0x00000008)) {
8937             output.writeEnum(4, deleteType_.getNumber());
8938           }
8939           if (((bitField0_ & 0x00000010) == 0x00000010)) {
8940             output.writeBytes(5, tags_);
8941           }
8942           getUnknownFields().writeTo(output);
8943         }
8944 
8945         private int memoizedSerializedSize = -1;
8946         public int getSerializedSize() {
8947           int size = memoizedSerializedSize;
8948           if (size != -1) return size;
8949 
8950           size = 0;
8951           if (((bitField0_ & 0x00000001) == 0x00000001)) {
8952             size += com.google.protobuf.CodedOutputStream
8953               .computeBytesSize(1, qualifier_);
8954           }
8955           if (((bitField0_ & 0x00000002) == 0x00000002)) {
8956             size += com.google.protobuf.CodedOutputStream
8957               .computeBytesSize(2, value_);
8958           }
8959           if (((bitField0_ & 0x00000004) == 0x00000004)) {
8960             size += com.google.protobuf.CodedOutputStream
8961               .computeUInt64Size(3, timestamp_);
8962           }
8963           if (((bitField0_ & 0x00000008) == 0x00000008)) {
8964             size += com.google.protobuf.CodedOutputStream
8965               .computeEnumSize(4, deleteType_.getNumber());
8966           }
8967           if (((bitField0_ & 0x00000010) == 0x00000010)) {
8968             size += com.google.protobuf.CodedOutputStream
8969               .computeBytesSize(5, tags_);
8970           }
8971           size += getUnknownFields().getSerializedSize();
8972           memoizedSerializedSize = size;
8973           return size;
8974         }
8975 
        private static final long serialVersionUID = 0L;
        // Java serialization hook: delegate to GeneratedMessage's replacement object.
        @java.lang.Override
        protected java.lang.Object writeReplace()
            throws java.io.ObjectStreamException {
          return super.writeReplace();
        }
8982 
8983         @java.lang.Override
8984         public boolean equals(final java.lang.Object obj) {
8985           if (obj == this) {
8986            return true;
8987           }
8988           if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)) {
8989             return super.equals(obj);
8990           }
8991           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj;
8992 
8993           boolean result = true;
8994           result = result && (hasQualifier() == other.hasQualifier());
8995           if (hasQualifier()) {
8996             result = result && getQualifier()
8997                 .equals(other.getQualifier());
8998           }
8999           result = result && (hasValue() == other.hasValue());
9000           if (hasValue()) {
9001             result = result && getValue()
9002                 .equals(other.getValue());
9003           }
9004           result = result && (hasTimestamp() == other.hasTimestamp());
9005           if (hasTimestamp()) {
9006             result = result && (getTimestamp()
9007                 == other.getTimestamp());
9008           }
9009           result = result && (hasDeleteType() == other.hasDeleteType());
9010           if (hasDeleteType()) {
9011             result = result &&
9012                 (getDeleteType() == other.getDeleteType());
9013           }
9014           result = result && (hasTags() == other.hasTags());
9015           if (hasTags()) {
9016             result = result && getTags()
9017                 .equals(other.getTags());
9018           }
9019           result = result &&
9020               getUnknownFields().equals(other.getUnknownFields());
9021           return result;
9022         }
9023 
9024         private int memoizedHashCode = 0;
9025         @java.lang.Override
9026         public int hashCode() {
9027           if (memoizedHashCode != 0) {
9028             return memoizedHashCode;
9029           }
9030           int hash = 41;
9031           hash = (19 * hash) + getDescriptorForType().hashCode();
9032           if (hasQualifier()) {
9033             hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
9034             hash = (53 * hash) + getQualifier().hashCode();
9035           }
9036           if (hasValue()) {
9037             hash = (37 * hash) + VALUE_FIELD_NUMBER;
9038             hash = (53 * hash) + getValue().hashCode();
9039           }
9040           if (hasTimestamp()) {
9041             hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
9042             hash = (53 * hash) + hashLong(getTimestamp());
9043           }
9044           if (hasDeleteType()) {
9045             hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER;
9046             hash = (53 * hash) + hashEnum(getDeleteType());
9047           }
9048           if (hasTags()) {
9049             hash = (37 * hash) + TAGS_FIELD_NUMBER;
9050             hash = (53 * hash) + getTags().hashCode();
9051           }
9052           hash = (29 * hash) + getUnknownFields().hashCode();
9053           memoizedHashCode = hash;
9054           return hash;
9055         }
9056 
        // ---- Static parse entry points; all delegate to PARSER. ----

        /** Parses a message from a ByteString. */
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
            com.google.protobuf.ByteString data)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data);
        }
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
            com.google.protobuf.ByteString data,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data, extensionRegistry);
        }
        /** Parses a message from a byte array. */
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data);
        }
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
            byte[] data,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return PARSER.parseFrom(data, extensionRegistry);
        }
        /** Parses a message from a stream, consuming it to EOF. */
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input)
            throws java.io.IOException {
          return PARSER.parseFrom(input);
        }
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
            java.io.InputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          return PARSER.parseFrom(input, extensionRegistry);
        }
        /** Parses a length-prefixed message from a stream (see writeDelimitedTo). */
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input)
            throws java.io.IOException {
          return PARSER.parseDelimitedFrom(input);
        }
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(
            java.io.InputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          return PARSER.parseDelimitedFrom(input, extensionRegistry);
        }
        /** Parses a message from an already-wrapped CodedInputStream. */
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
            com.google.protobuf.CodedInputStream input)
            throws java.io.IOException {
          return PARSER.parseFrom(input);
        }
        public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          return PARSER.parseFrom(input, extensionRegistry);
        }
9109 
9110         public static Builder newBuilder() { return Builder.create(); }
9111         public Builder newBuilderForType() { return newBuilder(); }
9112         public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) {
9113           return newBuilder().mergeFrom(prototype);
9114         }
9115         public Builder toBuilder() { return newBuilder(this); }
9116 
9117         @java.lang.Override
9118         protected Builder newBuilderForType(
9119             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9120           Builder builder = new Builder(parent);
9121           return builder;
9122         }
9123         /**
9124          * Protobuf type {@code MutationProto.ColumnValue.QualifierValue}
9125          */
9126         public static final class Builder extends
9127             com.google.protobuf.GeneratedMessage.Builder<Builder>
9128            implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder {
          /** Returns the protobuf descriptor shared with the message class. */
          public static final com.google.protobuf.Descriptors.Descriptor
              getDescriptor() {
            return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
          }

          /** Binds the reflective field accessors to the generated message/builder classes. */
          protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
              internalGetFieldAccessorTable() {
            return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
                .ensureFieldAccessorsInitialized(
                    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
          }

          // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder()
          private Builder() {
            maybeForceBuilderInitialization();
          }

          // Constructor for nested builders; parent is notified of changes.
          private Builder(
              com.google.protobuf.GeneratedMessage.BuilderParent parent) {
            super(parent);
            maybeForceBuilderInitialization();
          }
          // No message-typed fields here, so there are no sub-builders to pre-create.
          private void maybeForceBuilderInitialization() {
            if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            }
          }
          private static Builder create() {
            return new Builder();
          }
9158 
9159           public Builder clear() {
9160             super.clear();
9161             qualifier_ = com.google.protobuf.ByteString.EMPTY;
9162             bitField0_ = (bitField0_ & ~0x00000001);
9163             value_ = com.google.protobuf.ByteString.EMPTY;
9164             bitField0_ = (bitField0_ & ~0x00000002);
9165             timestamp_ = 0L;
9166             bitField0_ = (bitField0_ & ~0x00000004);
9167             deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9168             bitField0_ = (bitField0_ & ~0x00000008);
9169             tags_ = com.google.protobuf.ByteString.EMPTY;
9170             bitField0_ = (bitField0_ & ~0x00000010);
9171             return this;
9172           }
9173 
          // Deep copy: snapshot current state and merge it into a new builder.
          public Builder clone() {
            return create().mergeFrom(buildPartial());
          }

          public com.google.protobuf.Descriptors.Descriptor
              getDescriptorForType() {
            return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
          }

          public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() {
            return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance();
          }
9186 
9187           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() {
9188             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial();
9189             if (!result.isInitialized()) {
9190               throw newUninitializedMessageException(result);
9191             }
9192             return result;
9193           }
9194 
9195           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() {
9196             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this);
9197             int from_bitField0_ = bitField0_;
9198             int to_bitField0_ = 0;
9199             if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9200               to_bitField0_ |= 0x00000001;
9201             }
9202             result.qualifier_ = qualifier_;
9203             if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
9204               to_bitField0_ |= 0x00000002;
9205             }
9206             result.value_ = value_;
9207             if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
9208               to_bitField0_ |= 0x00000004;
9209             }
9210             result.timestamp_ = timestamp_;
9211             if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
9212               to_bitField0_ |= 0x00000008;
9213             }
9214             result.deleteType_ = deleteType_;
9215             if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
9216               to_bitField0_ |= 0x00000010;
9217             }
9218             result.tags_ = tags_;
9219             result.bitField0_ = to_bitField0_;
9220             onBuilt();
9221             return result;
9222           }
9223 
9224           public Builder mergeFrom(com.google.protobuf.Message other) {
9225             if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) {
9226               return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other);
9227             } else {
9228               super.mergeFrom(other);
9229               return this;
9230             }
9231           }
9232 
          /**
           * Copies every present field from {@code other} into this builder,
           * overwriting existing values. Merging the default instance is a no-op.
           */
          public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) {
            if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this;
            if (other.hasQualifier()) {
              setQualifier(other.getQualifier());
            }
            if (other.hasValue()) {
              setValue(other.getValue());
            }
            if (other.hasTimestamp()) {
              setTimestamp(other.getTimestamp());
            }
            if (other.hasDeleteType()) {
              setDeleteType(other.getDeleteType());
            }
            if (other.hasTags()) {
              setTags(other.getTags());
            }
            this.mergeUnknownFields(other.getUnknownFields());
            return this;
          }
9253 
          // All fields are optional, so a builder is always initialized.
          public final boolean isInitialized() {
            return true;
          }

          /**
           * Parses from the stream and merges the result into this builder.
           * On parse failure the partially parsed message (if any) is still
           * merged in the finally block before the exception propagates.
           */
          public Builder mergeFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws java.io.IOException {
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null;
            try {
              parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage();
              throw e;
            } finally {
              if (parsedMessage != null) {
                mergeFrom(parsedMessage);
              }
            }
            return this;
          }
          // ---- Builder state for MutationProto.ColumnValue.QualifierValue ----
          // NOTE: generated by protoc — do not hand-edit logic.
          // bitField0_ records which optional fields were explicitly set:
          // 0x01 qualifier, 0x02 value, 0x04 timestamp, 0x08 delete_type, 0x10 tags.
          private int bitField0_;

          // optional bytes qualifier = 1;
          private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
          /**
           * <code>optional bytes qualifier = 1;</code>
           */
          public boolean hasQualifier() {
            return ((bitField0_ & 0x00000001) == 0x00000001);
          }
          /**
           * <code>optional bytes qualifier = 1;</code>
           */
          public com.google.protobuf.ByteString getQualifier() {
            return qualifier_;
          }
          /**
           * <code>optional bytes qualifier = 1;</code>
           *
           * @throws NullPointerException if {@code value} is null
           */
          public Builder setQualifier(com.google.protobuf.ByteString value) {
            if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
            qualifier_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional bytes qualifier = 1;</code>
           */
          public Builder clearQualifier() {
            bitField0_ = (bitField0_ & ~0x00000001);
            // reset to the message-type default (empty byte string)
            qualifier_ = getDefaultInstance().getQualifier();
            onChanged();
            return this;
          }

          // optional bytes value = 2;
          private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
          /**
           * <code>optional bytes value = 2;</code>
           */
          public boolean hasValue() {
            return ((bitField0_ & 0x00000002) == 0x00000002);
          }
          /**
           * <code>optional bytes value = 2;</code>
           */
          public com.google.protobuf.ByteString getValue() {
            return value_;
          }
          /**
           * <code>optional bytes value = 2;</code>
           *
           * @throws NullPointerException if {@code value} is null
           */
          public Builder setValue(com.google.protobuf.ByteString value) {
            if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
            value_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional bytes value = 2;</code>
           */
          public Builder clearValue() {
            bitField0_ = (bitField0_ & ~0x00000002);
            value_ = getDefaultInstance().getValue();
            onChanged();
            return this;
          }

          // optional uint64 timestamp = 3;
          private long timestamp_ ;
          /**
           * <code>optional uint64 timestamp = 3;</code>
           */
          public boolean hasTimestamp() {
            return ((bitField0_ & 0x00000004) == 0x00000004);
          }
          /**
           * <code>optional uint64 timestamp = 3;</code>
           */
          public long getTimestamp() {
            return timestamp_;
          }
          /**
           * <code>optional uint64 timestamp = 3;</code>
           */
          public Builder setTimestamp(long value) {
            bitField0_ |= 0x00000004;
            timestamp_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional uint64 timestamp = 3;</code>
           */
          public Builder clearTimestamp() {
            bitField0_ = (bitField0_ & ~0x00000004);
            timestamp_ = 0L;
            onChanged();
            return this;
          }

          // optional .MutationProto.DeleteType delete_type = 4;
          private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
          /**
           * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
           */
          public boolean hasDeleteType() {
            return ((bitField0_ & 0x00000008) == 0x00000008);
          }
          /**
           * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
           */
          public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
            return deleteType_;
          }
          /**
           * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
           *
           * @throws NullPointerException if {@code value} is null
           */
          public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value) {
            if (value == null) {
              throw new NullPointerException();
            }
            bitField0_ |= 0x00000008;
            deleteType_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
           */
          public Builder clearDeleteType() {
            bitField0_ = (bitField0_ & ~0x00000008);
            // proto default for delete_type is DELETE_ONE_VERSION
            deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
            onChanged();
            return this;
          }

          // optional bytes tags = 5;
          private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
          /**
           * <code>optional bytes tags = 5;</code>
           */
          public boolean hasTags() {
            return ((bitField0_ & 0x00000010) == 0x00000010);
          }
          /**
           * <code>optional bytes tags = 5;</code>
           */
          public com.google.protobuf.ByteString getTags() {
            return tags_;
          }
          /**
           * <code>optional bytes tags = 5;</code>
           *
           * @throws NullPointerException if {@code value} is null
           */
          public Builder setTags(com.google.protobuf.ByteString value) {
            if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
            tags_ = value;
            onChanged();
            return this;
          }
          /**
           * <code>optional bytes tags = 5;</code>
           */
          public Builder clearTags() {
            bitField0_ = (bitField0_ & ~0x00000010);
            tags_ = getDefaultInstance().getTags();
            onChanged();
            return this;
          }
9453 
9454           // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue.QualifierValue)
9455         }
9456 
        static {
          // Eagerly build the singleton default instance used by getDefaultInstance().
          defaultInstance = new QualifierValue(true);
          defaultInstance.initFields();
        }
9461 
9462         // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue.QualifierValue)
9463       }
9464 
      // ---- Message-side (immutable) state for MutationProto.ColumnValue ----
      // bit 0x01 of bitField0_ marks presence of the required 'family' field.
      private int bitField0_;
      // required bytes family = 1;
      public static final int FAMILY_FIELD_NUMBER = 1;
      private com.google.protobuf.ByteString family_;
      /**
       * <code>required bytes family = 1;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }

      // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
      public static final int QUALIFIER_VALUE_FIELD_NUMBER = 2;
      // List is made unmodifiable in the builder's buildPartial(); safe to hand out directly.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_;
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
        return qualifierValue_;
      }
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
          getQualifierValueOrBuilderList() {
        return qualifierValue_;
      }
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      public int getQualifierValueCount() {
        return qualifierValue_.size();
      }
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
        return qualifierValue_.get(index);
      }
      /**
       * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
          int index) {
        return qualifierValue_.get(index);
      }
9517 
      // Assigns field defaults for the singleton default instance.
      private void initFields() {
        family_ = com.google.protobuf.ByteString.EMPTY;
        qualifierValue_ = java.util.Collections.emptyList();
      }
      // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // 'family' is the only required field checked here.
        if (!hasFamily()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }
9534 
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        // Populate memoizedSerializedSize first; nested message writes rely on cached sizes.
        getSerializedSize();
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, family_);
        }
        for (int i = 0; i < qualifierValue_.size(); i++) {
          output.writeMessage(2, qualifierValue_.get(i));
        }
        getUnknownFields().writeTo(output);
      }

      // Cached wire size; -1 means not yet computed.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, family_);
        }
        for (int i = 0; i < qualifierValue_.size(); i++) {
          size += com.google.protobuf.CodedOutputStream
            .computeMessageSize(2, qualifierValue_.get(i));
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
9565 
      private static final long serialVersionUID = 0L;
      // Java serialization hook inherited from GeneratedMessage (serializes via proto bytes).
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }

      // Field-by-field equality: presence bits, set values, and unknown fields must all match.
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
         return true;
        }
        if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)) {
          return super.equals(obj);
        }
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj;

        boolean result = true;
        result = result && (hasFamily() == other.hasFamily());
        if (hasFamily()) {
          result = result && getFamily()
              .equals(other.getFamily());
        }
        result = result && getQualifierValueList()
            .equals(other.getQualifierValueList());
        result = result &&
            getUnknownFields().equals(other.getUnknownFields());
        return result;
      }
9595 
      // Cached hash; 0 doubles as the "not yet computed" sentinel.
      private int memoizedHashCode = 0;
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        // Only set fields contribute, keyed by field number, mirroring equals().
        if (hasFamily()) {
          hash = (37 * hash) + FAMILY_FIELD_NUMBER;
          hash = (53 * hash) + getFamily().hashCode();
        }
        if (getQualifierValueCount() > 0) {
          hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER;
          hash = (53 * hash) + getQualifierValueList().hashCode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
        memoizedHashCode = hash;
        return hash;
      }
9616 
      // ---- Static parsing entry points; all delegate to the shared PARSER. ----
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      // Delimited variants read a leading varint length before the message bytes.
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      // ---- Builder factories ----
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
9683       /**
9684        * Protobuf type {@code MutationProto.ColumnValue}
9685        */
9686       public static final class Builder extends
9687           com.google.protobuf.GeneratedMessage.Builder<Builder>
9688          implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder {
        public static final com.google.protobuf.Descriptors.Descriptor
            getDescriptor() {
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor;
        }

        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
            internalGetFieldAccessorTable() {
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable
              .ensureFieldAccessorsInitialized(
                  org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
        }

        // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder()
        private Builder() {
          maybeForceBuilderInitialization();
        }

        private Builder(
            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
          super(parent);
          maybeForceBuilderInitialization();
        }
        // Pre-creates nested field builders when the runtime requires it
        // (alwaysUseFieldBuilders is true for nested-builder-backed messages).
        private void maybeForceBuilderInitialization() {
          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
            getQualifierValueFieldBuilder();
          }
        }
        private static Builder create() {
          return new Builder();
        }
9719 
        // Resets every field to its default; clears either the raw list or the
        // nested field builder, whichever currently owns the repeated field.
        public Builder clear() {
          super.clear();
          family_ = com.google.protobuf.ByteString.EMPTY;
          bitField0_ = (bitField0_ & ~0x00000001);
          if (qualifierValueBuilder_ == null) {
            qualifierValue_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            qualifierValueBuilder_.clear();
          }
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(buildPartial());
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor;
        }

        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance();
        }
9745 
        // Builds and verifies required fields; throws if 'family' is unset.
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() {
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial();
          if (!result.isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return result;
        }

        // Builds without the required-field check, copying builder state into
        // an immutable message (repeated list is frozen via unmodifiableList).
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() {
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this);
          int from_bitField0_ = bitField0_;
          int to_bitField0_ = 0;
          // Translate builder presence bits to message presence bits (family only).
          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
            to_bitField0_ |= 0x00000001;
          }
          result.family_ = family_;
          if (qualifierValueBuilder_ == null) {
            if (((bitField0_ & 0x00000002) == 0x00000002)) {
              qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
              bitField0_ = (bitField0_ & ~0x00000002);
            }
            result.qualifierValue_ = qualifierValue_;
          } else {
            result.qualifierValue_ = qualifierValueBuilder_.build();
          }
          result.bitField0_ = to_bitField0_;
          onBuilt();
          return result;
        }
9775 
        // Dispatches to the typed merge when possible; otherwise falls back to
        // the reflective merge in the superclass.
        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) {
            return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-wise merge: set fields in 'other' overwrite/append into this builder.
        public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) {
          if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) return this;
          if (other.hasFamily()) {
            setFamily(other.getFamily());
          }
          if (qualifierValueBuilder_ == null) {
            if (!other.qualifierValue_.isEmpty()) {
              if (qualifierValue_.isEmpty()) {
                // Adopt the other message's (immutable) list directly; copied on first write.
                qualifierValue_ = other.qualifierValue_;
                bitField0_ = (bitField0_ & ~0x00000002);
              } else {
                ensureQualifierValueIsMutable();
                qualifierValue_.addAll(other.qualifierValue_);
              }
              onChanged();
            }
          } else {
            if (!other.qualifierValue_.isEmpty()) {
              if (qualifierValueBuilder_.isEmpty()) {
                // Builder holds nothing: drop it and adopt the list, recreating
                // the field builder only if the runtime mandates it.
                qualifierValueBuilder_.dispose();
                qualifierValueBuilder_ = null;
                qualifierValue_ = other.qualifierValue_;
                bitField0_ = (bitField0_ & ~0x00000002);
                qualifierValueBuilder_ = 
                  com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                     getQualifierValueFieldBuilder() : null;
              } else {
                qualifierValueBuilder_.addAllMessages(other.qualifierValue_);
              }
            }
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public final boolean isInitialized() {
          if (!hasFamily()) {
            // required field 'family' is missing
            return false;
          }
          return true;
        }
9827 
        // Parses from the wire and merges the result into this builder. On a
        // parse error, any partially-parsed message is still merged (in the
        // finally block) before the exception is rethrown.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null;
          try {
            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage();
            throw e;
          } finally {
            if (parsedMessage != null) {
              mergeFrom(parsedMessage);
            }
          }
          return this;
        }
        // Builder presence bits: 0x01 family set, 0x02 qualifierValue_ list is mutable.
        private int bitField0_;
9846 
        // required bytes family = 1;
        private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
        /**
         * <code>required bytes family = 1;</code>
         */
        public boolean hasFamily() {
          return ((bitField0_ & 0x00000001) == 0x00000001);
        }
        /**
         * <code>required bytes family = 1;</code>
         */
        public com.google.protobuf.ByteString getFamily() {
          return family_;
        }
        /**
         * <code>required bytes family = 1;</code>
         *
         * @throws NullPointerException if {@code value} is null
         */
        public Builder setFamily(com.google.protobuf.ByteString value) {
          if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
          family_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required bytes family = 1;</code>
         */
        public Builder clearFamily() {
          bitField0_ = (bitField0_ & ~0x00000001);
          family_ = getDefaultInstance().getFamily();
          onChanged();
          return this;
        }
9882 
        // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
        // Backing list; may alias an immutable list adopted from another message
        // until ensureQualifierValueIsMutable() copies it on first write.
        private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_ =
          java.util.Collections.emptyList();
        private void ensureQualifierValueIsMutable() {
          if (!((bitField0_ & 0x00000002) == 0x00000002)) {
            // Copy-on-write: bit 0x02 marks the list as privately owned and mutable.
            qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(qualifierValue_);
            bitField0_ |= 0x00000002;
           }
        }

        // Non-null once nested builders are in use; then it, not qualifierValue_,
        // owns the repeated field (every accessor below branches on this).
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_;

        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
          if (qualifierValueBuilder_ == null) {
            return java.util.Collections.unmodifiableList(qualifierValue_);
          } else {
            return qualifierValueBuilder_.getMessageList();
          }
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public int getQualifierValueCount() {
          if (qualifierValueBuilder_ == null) {
            return qualifierValue_.size();
          } else {
            return qualifierValueBuilder_.getCount();
          }
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
          if (qualifierValueBuilder_ == null) {
            return qualifierValue_.get(index);
          } else {
            return qualifierValueBuilder_.getMessage(index);
          }
        }
        // ---- Mutators for the repeated qualifier_value field. Each routes to the
        // nested field builder when present, otherwise to the plain backing list.
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public Builder setQualifierValue(
            int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
          if (qualifierValueBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureQualifierValueIsMutable();
            qualifierValue_.set(index, value);
            onChanged();
          } else {
            qualifierValueBuilder_.setMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public Builder setQualifierValue(
            int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
          if (qualifierValueBuilder_ == null) {
            ensureQualifierValueIsMutable();
            qualifierValue_.set(index, builderForValue.build());
            onChanged();
          } else {
            qualifierValueBuilder_.setMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
          if (qualifierValueBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureQualifierValueIsMutable();
            qualifierValue_.add(value);
            onChanged();
          } else {
            qualifierValueBuilder_.addMessage(value);
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public Builder addQualifierValue(
            int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
          if (qualifierValueBuilder_ == null) {
            if (value == null) {
              throw new NullPointerException();
            }
            ensureQualifierValueIsMutable();
            qualifierValue_.add(index, value);
            onChanged();
          } else {
            qualifierValueBuilder_.addMessage(index, value);
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public Builder addQualifierValue(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
          if (qualifierValueBuilder_ == null) {
            ensureQualifierValueIsMutable();
            qualifierValue_.add(builderForValue.build());
            onChanged();
          } else {
            qualifierValueBuilder_.addMessage(builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        public Builder addQualifierValue(
            int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
          if (qualifierValueBuilder_ == null) {
            ensureQualifierValueIsMutable();
            qualifierValue_.add(index, builderForValue.build());
            onChanged();
          } else {
            qualifierValueBuilder_.addMessage(index, builderForValue.build());
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Bulk-appends all messages from the given iterable.  In list mode the
        // inherited GeneratedMessage.Builder.addAll helper copies them in (it also
        // null-checks each element).
        public Builder addAllQualifierValue(
            java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values) {
          if (qualifierValueBuilder_ == null) {
            ensureQualifierValueIsMutable();
            super.addAll(values, qualifierValue_);
            onChanged();
          } else {
            qualifierValueBuilder_.addAllMessages(values);
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Empties the repeated field.  In list mode the presence bit (0x2) is also
        // cleared so a fresh mutable list is allocated on the next mutation.
        public Builder clearQualifierValue() {
          if (qualifierValueBuilder_ == null) {
            qualifierValue_ = java.util.Collections.emptyList();
            bitField0_ = (bitField0_ & ~0x00000002);
            onChanged();
          } else {
            qualifierValueBuilder_.clear();
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Removes the element at the given index from whichever representation
        // (plain list or field builder) is currently active.
        public Builder removeQualifierValue(int index) {
          if (qualifierValueBuilder_ == null) {
            ensureQualifierValueIsMutable();
            qualifierValue_.remove(index);
            onChanged();
          } else {
            qualifierValueBuilder_.remove(index);
          }
          return this;
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Returns a mutable sub-builder for the element at index.  Note this
        // forces creation of the RepeatedFieldBuilder, switching the field into
        // builder mode permanently.
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder(
            int index) {
          return getQualifierValueFieldBuilder().getBuilder(index);
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Read-only view of one element; does not force builder mode.
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
            int index) {
          if (qualifierValueBuilder_ == null) {
            return qualifierValue_.get(index);  } else {
            return qualifierValueBuilder_.getMessageOrBuilder(index);
          }
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Unmodifiable view of all elements; does not force builder mode.
        public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
             getQualifierValueOrBuilderList() {
          if (qualifierValueBuilder_ != null) {
            return qualifierValueBuilder_.getMessageOrBuilderList();
          } else {
            return java.util.Collections.unmodifiableList(qualifierValue_);
          }
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Appends a new default-initialized element and returns its sub-builder
        // for in-place population (forces builder mode).
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() {
          return getQualifierValueFieldBuilder().addBuilder(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Positional variant of addQualifierValueBuilder().
        public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder(
            int index) {
          return getQualifierValueFieldBuilder().addBuilder(
              index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
        }
        /**
         * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
         */
        // Sub-builders for every element (forces builder mode).
        public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder> 
             getQualifierValueBuilderList() {
          return getQualifierValueFieldBuilder().getBuilderList();
        }
        // Lazily-created helper managing the repeated qualifier_value field once
        // any *Builder accessor has been used.  After creation the plain list
        // reference is nulled out: the RepeatedFieldBuilder becomes the single
        // source of truth for this field.
        private com.google.protobuf.RepeatedFieldBuilder<
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> 
            getQualifierValueFieldBuilder() {
          if (qualifierValueBuilder_ == null) {
            qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>(
                    qualifierValue_,
                    ((bitField0_ & 0x00000002) == 0x00000002),
                    getParentForChildren(),
                    isClean());
            qualifierValue_ = null;
          }
          return qualifierValueBuilder_;
        }
10122 
10123         // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue)
10124       }
10125 
      // Eagerly builds the shared default (all-fields-unset) ColumnValue
      // instance returned by getDefaultInstance().
      static {
        defaultInstance = new ColumnValue(true);
        defaultInstance.initFields();
      }
10130 
10131       // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue)
10132     }
10133 
    // Presence bits for the optional fields of this message; each has* accessor
    // below tests exactly one bit.  Repeated fields have no presence bit.
    private int bitField0_;
    // optional bytes row = 1;
    public static final int ROW_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString row_;
    /**
     * <code>optional bytes row = 1;</code>
     */
    public boolean hasRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes row = 1;</code>
     */
    public com.google.protobuf.ByteString getRow() {
      return row_;
    }

    // optional .MutationProto.MutationType mutate_type = 2;
    public static final int MUTATE_TYPE_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_;
    /**
     * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
     */
    public boolean hasMutateType() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
      return mutateType_;
    }

    // repeated .MutationProto.ColumnValue column_value = 3;
    public static final int COLUMN_VALUE_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_;
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
      return columnValue_;
    }
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
        getColumnValueOrBuilderList() {
      return columnValue_;
    }
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    public int getColumnValueCount() {
      return columnValue_.size();
    }
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
      return columnValue_.get(index);
    }
    /**
     * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
        int index) {
      return columnValue_.get(index);
    }

    // optional uint64 timestamp = 4;
    public static final int TIMESTAMP_FIELD_NUMBER = 4;
    private long timestamp_;
    /**
     * <code>optional uint64 timestamp = 4;</code>
     */
    public boolean hasTimestamp() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 timestamp = 4;</code>
     */
    public long getTimestamp() {
      return timestamp_;
    }
10218 
    // repeated .NameBytesPair attribute = 5;
    public static final int ATTRIBUTE_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
        getAttributeOrBuilderList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    public int getAttributeCount() {
      return attribute_.size();
    }
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
      return attribute_.get(index);
    }
    /**
     * <code>repeated .NameBytesPair attribute = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index) {
      return attribute_.get(index);
    }

    // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];
    public static final int DURABILITY_FIELD_NUMBER = 6;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_;
    /**
     * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
     */
    public boolean hasDurability() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
      return durability_;
    }

    // optional .TimeRange time_range = 7;
    public static final int TIME_RANGE_FIELD_NUMBER = 7;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
    /**
     * <code>optional .TimeRange time_range = 7;</code>
     *
     * <pre>
     * For some mutations, a result may be returned, in which case,
     * time range can be specified for potential performance gain
     * </pre>
     */
    public boolean hasTimeRange() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional .TimeRange time_range = 7;</code>
     *
     * <pre>
     * For some mutations, a result may be returned, in which case,
     * time range can be specified for potential performance gain
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
      return timeRange_;
    }
    /**
     * <code>optional .TimeRange time_range = 7;</code>
     *
     * <pre>
     * For some mutations, a result may be returned, in which case,
     * time range can be specified for potential performance gain
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
      return timeRange_;
    }
10307 
    // optional int32 associated_cell_count = 8;
    public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8;
    private int associatedCellCount_;
    /**
     * <code>optional int32 associated_cell_count = 8;</code>
     *
     * <pre>
     * The below count is set when the associated cells are NOT
     * part of this protobuf message; they are passed alongside
     * and then this Message is a placeholder with metadata.  The
     * count is needed to know how many to peel off the block of Cells as
     * ours.  NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    public boolean hasAssociatedCellCount() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional int32 associated_cell_count = 8;</code>
     *
     * <pre>
     * The below count is set when the associated cells are NOT
     * part of this protobuf message; they are passed alongside
     * and then this Message is a placeholder with metadata.  The
     * count is needed to know how many to peel off the block of Cells as
     * ours.  NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    public int getAssociatedCellCount() {
      return associatedCellCount_;
    }

    // optional uint64 nonce = 9;
    public static final int NONCE_FIELD_NUMBER = 9;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 9;</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint64 nonce = 9;</code>
     */
    public long getNonce() {
      return nonce_;
    }
10357 
    // Resets every field to its proto-declared default (empty bytes/lists, 0 for
    // scalars, first/declared-default enum value, default TimeRange instance).
    // Called on the shared default instance and by the parsing constructor.
    private void initFields() {
      row_ = com.google.protobuf.ByteString.EMPTY;
      mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
      columnValue_ = java.util.Collections.emptyList();
      timestamp_ = 0L;
      attribute_ = java.util.Collections.emptyList();
      durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
      timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      associatedCellCount_ = 0;
      nonce_ = 0L;
    }
    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A MutationProto is initialized iff every nested column_value and attribute
    // message is itself initialized (this message has no required fields of its own).
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getColumnValueCount(); i++) {
        if (!getColumnValue(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAttributeCount(); i++) {
        if (!getAttribute(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
10389 
    // Serializes set fields in field-number order (tags 1..9), then any unknown
    // fields retained from parsing.  getSerializedSize() is invoked first to
    // populate cached sizes used when writing nested messages.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, row_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeEnum(2, mutateType_.getNumber());
      }
      for (int i = 0; i < columnValue_.size(); i++) {
        output.writeMessage(3, columnValue_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(4, timestamp_);
      }
      for (int i = 0; i < attribute_.size(); i++) {
        output.writeMessage(5, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeEnum(6, durability_.getNumber());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(7, timeRange_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeInt32(8, associatedCellCount_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt64(9, nonce_);
      }
      getUnknownFields().writeTo(output);
    }
10422 
    // Cached wire size of this (immutable) message; -1 until first computed.
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of every set field (mirroring writeTo) plus any
    // retained unknown fields, memoizing the result.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, row_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(2, mutateType_.getNumber());
      }
      for (int i = 0; i < columnValue_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, columnValue_.get(i));
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, timestamp_);
      }
      for (int i = 0; i < attribute_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(6, durability_.getNumber());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(7, timeRange_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(8, associatedCellCount_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(9, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
10469 
    private static final long serialVersionUID = 0L;
    // Delegates Java serialization to the GeneratedMessage replacement proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
10476 
    // Field-by-field equality: presence flags must match, and set fields must be
    // equal.  Unknown fields participate too, so two messages parsed from
    // different bytes never compare equal.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) obj;

      boolean result = true;
      result = result && (hasRow() == other.hasRow());
      if (hasRow()) {
        result = result && getRow()
            .equals(other.getRow());
      }
      result = result && (hasMutateType() == other.hasMutateType());
      if (hasMutateType()) {
        result = result &&
            (getMutateType() == other.getMutateType());
      }
      result = result && getColumnValueList()
          .equals(other.getColumnValueList());
      result = result && (hasTimestamp() == other.hasTimestamp());
      if (hasTimestamp()) {
        result = result && (getTimestamp()
            == other.getTimestamp());
      }
      result = result && getAttributeList()
          .equals(other.getAttributeList());
      result = result && (hasDurability() == other.hasDurability());
      if (hasDurability()) {
        result = result &&
            (getDurability() == other.getDurability());
      }
      result = result && (hasTimeRange() == other.hasTimeRange());
      if (hasTimeRange()) {
        result = result && getTimeRange()
            .equals(other.getTimeRange());
      }
      result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
      if (hasAssociatedCellCount()) {
        result = result && (getAssociatedCellCount()
            == other.getAssociatedCellCount());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
10531 
    // Cached hash; 0 doubles as the "not yet computed" sentinel (safe because
    // the computed hash is very unlikely to be exactly 0, and recomputation is
    // harmless on an immutable message).
    private int memoizedHashCode = 0;
    // Mixes the descriptor, each set field (keyed by its field number), and the
    // unknown field set — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRow()) {
        hash = (37 * hash) + ROW_FIELD_NUMBER;
        hash = (53 * hash) + getRow().hashCode();
      }
      if (hasMutateType()) {
        hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getMutateType());
      }
      if (getColumnValueCount() > 0) {
        hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getColumnValueList().hashCode();
      }
      if (hasTimestamp()) {
        hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getTimestamp());
      }
      if (getAttributeCount() > 0) {
        hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeList().hashCode();
      }
      if (hasDurability()) {
        hash = (37 * hash) + DURABILITY_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getDurability());
      }
      if (hasTimeRange()) {
        hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
        hash = (53 * hash) + getTimeRange().hashCode();
      }
      if (hasAssociatedCellCount()) {
        hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
        hash = (53 * hash) + getAssociatedCellCount();
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
10580 
    // Standard generated parse entry points: every overload delegates to the
    // shared PARSER, differing only in input form (ByteString, byte[], stream,
    // CodedInputStream, length-delimited stream) and optional extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10633 
    // Builder factories: a fresh builder, a builder pre-populated from a
    // prototype message, and a round-trip back from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Internal factory used by parent builders to create a child builder wired
    // into their change-notification chain.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
10647     /**
10648      * Protobuf type {@code MutationProto}
10649      *
10650      * <pre>
10651      **
10652      * A specific mutation inside a mutate request.
10653      * It can be an append, increment, put or delete based
10654      * on the mutation type.  It can be fully filled in or
10655      * only metadata present because data is being carried
10656      * elsewhere outside of pb.
10657      * </pre>
10658      */
10659     public static final class Builder extends
10660         com.google.protobuf.GeneratedMessage.Builder<Builder>
10661        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder {
      // Descriptor plumbing: exposes the MutationProto message descriptor and
      // the reflection accessor table shared with the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
      }
10673 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Variant used when this builder is a child of another builder and must
      // propagate change notifications upward.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested field builders for the message-typed fields
      // (column_value, attribute, time_range) when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf test/debug hook).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getColumnValueFieldBuilder();
          getAttributeFieldBuilder();
          getTimeRangeFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
10694 
      // Resets every field to its default and drops all presence bits.  Note the
      // builder's bit layout differs from the message's: repeated fields
      // (column_value 0x4, attribute 0x10) also occupy bits here, used to track
      // whether the backing list is privately owned and mutable.
      public Builder clear() {
        super.clear();
        row_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (columnValueBuilder_ == null) {
          columnValue_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          columnValueBuilder_.clear();
        }
        timestamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (attributeBuilder_ == null) {
          attribute_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          attributeBuilder_.clear();
        }
        durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
        bitField0_ = (bitField0_ & ~0x00000020);
        if (timeRangeBuilder_ == null) {
          timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
        } else {
          timeRangeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        associatedCellCount_ = 0;
        bitField0_ = (bitField0_ & ~0x00000080);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
10729 
      // Deep copy: snapshot current state into a partial message and merge it
      // into a fresh builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
      }
10742 
      // Builds the message, throwing if any nested message is uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
10750 
      // Builds the message without the initialization check. The builder's
      // has-bits are remapped onto the message's denser bit layout (repeated
      // fields occupy a builder bit for copy-on-write tracking but no message
      // bit), so builder bit N does not equal message bit N below.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.row_ = row_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.mutateType_ = mutateType_;
        if (columnValueBuilder_ == null) {
          // Freeze the locally-held list before handing it to the message.
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.columnValue_ = columnValue_;
        } else {
          result.columnValue_ = columnValueBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000004;
        }
        result.timestamp_ = timestamp_;
        if (attributeBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            attribute_ = java.util.Collections.unmodifiableList(attribute_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.attribute_ = attribute_;
        } else {
          result.attribute_ = attributeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000008;
        }
        result.durability_ = durability_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000010;
        }
        if (timeRangeBuilder_ == null) {
          result.timeRange_ = timeRange_;
        } else {
          result.timeRange_ = timeRangeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000020;
        }
        result.associatedCellCount_ = associatedCellCount_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000040;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
10809 
10810       public Builder mergeFrom(com.google.protobuf.Message other) {
10811         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) {
10812           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)other);
10813         } else {
10814           super.mergeFrom(other);
10815           return this;
10816         }
10817       }
10818 
      // Field-by-field merge: scalar/enum fields are overwritten only when set
      // on 'other'; repeated fields are appended; nested time_range is merged
      // recursively.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this;
        if (other.hasRow()) {
          setRow(other.getRow());
        }
        if (other.hasMutateType()) {
          setMutateType(other.getMutateType());
        }
        if (columnValueBuilder_ == null) {
          if (!other.columnValue_.isEmpty()) {
            if (columnValue_.isEmpty()) {
              // Adopt other's (immutable) list directly; the cleared bit keeps
              // copy-on-write semantics so a later mutation copies it first.
              columnValue_ = other.columnValue_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureColumnValueIsMutable();
              columnValue_.addAll(other.columnValue_);
            }
            onChanged();
          }
        } else {
          if (!other.columnValue_.isEmpty()) {
            if (columnValueBuilder_.isEmpty()) {
              // Empty nested builder: cheaper to discard it and adopt other's
              // list, re-creating the builder only if the runtime forces it.
              columnValueBuilder_.dispose();
              columnValueBuilder_ = null;
              columnValue_ = other.columnValue_;
              bitField0_ = (bitField0_ & ~0x00000004);
              columnValueBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getColumnValueFieldBuilder() : null;
            } else {
              columnValueBuilder_.addAllMessages(other.columnValue_);
            }
          }
        }
        if (other.hasTimestamp()) {
          setTimestamp(other.getTimestamp());
        }
        // attribute: same merge strategy as column_value.
        if (attributeBuilder_ == null) {
          if (!other.attribute_.isEmpty()) {
            if (attribute_.isEmpty()) {
              attribute_ = other.attribute_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureAttributeIsMutable();
              attribute_.addAll(other.attribute_);
            }
            onChanged();
          }
        } else {
          if (!other.attribute_.isEmpty()) {
            if (attributeBuilder_.isEmpty()) {
              attributeBuilder_.dispose();
              attributeBuilder_ = null;
              attribute_ = other.attribute_;
              bitField0_ = (bitField0_ & ~0x00000010);
              attributeBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAttributeFieldBuilder() : null;
            } else {
              attributeBuilder_.addAllMessages(other.attribute_);
            }
          }
        }
        if (other.hasDurability()) {
          setDurability(other.getDurability());
        }
        if (other.hasTimeRange()) {
          mergeTimeRange(other.getTimeRange());
        }
        if (other.hasAssociatedCellCount()) {
          setAssociatedCellCount(other.getAssociatedCellCount());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
10897 
10898       public final boolean isInitialized() {
10899         for (int i = 0; i < getColumnValueCount(); i++) {
10900           if (!getColumnValue(i).isInitialized()) {
10901             
10902             return false;
10903           }
10904         }
10905         for (int i = 0; i < getAttributeCount(); i++) {
10906           if (!getAttribute(i).isInitialized()) {
10907             
10908             return false;
10909           }
10910         }
10911         return true;
10912       }
10913 
      // Wire-format merge. On a parse failure the partially-read message (if
      // any) is still merged in the finally block before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for this builder's fields; see buildPartial for the mapping
      // onto the message's bit layout.
      private int bitField0_;
10932 
10933       // optional bytes row = 1;
10934       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
10935       /**
10936        * <code>optional bytes row = 1;</code>
10937        */
10938       public boolean hasRow() {
10939         return ((bitField0_ & 0x00000001) == 0x00000001);
10940       }
10941       /**
10942        * <code>optional bytes row = 1;</code>
10943        */
10944       public com.google.protobuf.ByteString getRow() {
10945         return row_;
10946       }
10947       /**
10948        * <code>optional bytes row = 1;</code>
10949        */
10950       public Builder setRow(com.google.protobuf.ByteString value) {
10951         if (value == null) {
10952     throw new NullPointerException();
10953   }
10954   bitField0_ |= 0x00000001;
10955         row_ = value;
10956         onChanged();
10957         return this;
10958       }
10959       /**
10960        * <code>optional bytes row = 1;</code>
10961        */
10962       public Builder clearRow() {
10963         bitField0_ = (bitField0_ & ~0x00000001);
10964         row_ = getDefaultInstance().getRow();
10965         onChanged();
10966         return this;
10967       }
10968 
      // optional .MutationProto.MutationType mutate_type = 2;
      // Defaults to APPEND, the enum's first value.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public boolean hasMutateType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
        return mutateType_;
      }
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        mutateType_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public Builder clearMutateType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
        onChanged();
        return this;
      }
11004 
      // repeated .MutationProto.ColumnValue column_value = 3;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_ =
        java.util.Collections.emptyList();
      // Copy-on-write helper: the first mutation replaces the (possibly
      // shared or immutable) list with a private ArrayList and sets the bit
      // that records ownership.
      private void ensureColumnValueIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(columnValue_);
          bitField0_ |= 0x00000004;
         }
      }

      // Lazily-created nested builder; once it exists it owns the field state
      // and columnValue_ is set to null.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_;
11017 
11018       /**
11019        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11020        */
11021       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
11022         if (columnValueBuilder_ == null) {
11023           return java.util.Collections.unmodifiableList(columnValue_);
11024         } else {
11025           return columnValueBuilder_.getMessageList();
11026         }
11027       }
11028       /**
11029        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11030        */
11031       public int getColumnValueCount() {
11032         if (columnValueBuilder_ == null) {
11033           return columnValue_.size();
11034         } else {
11035           return columnValueBuilder_.getCount();
11036         }
11037       }
11038       /**
11039        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11040        */
11041       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
11042         if (columnValueBuilder_ == null) {
11043           return columnValue_.get(index);
11044         } else {
11045           return columnValueBuilder_.getMessage(index);
11046         }
11047       }
11048       /**
11049        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11050        */
11051       public Builder setColumnValue(
11052           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
11053         if (columnValueBuilder_ == null) {
11054           if (value == null) {
11055             throw new NullPointerException();
11056           }
11057           ensureColumnValueIsMutable();
11058           columnValue_.set(index, value);
11059           onChanged();
11060         } else {
11061           columnValueBuilder_.setMessage(index, value);
11062         }
11063         return this;
11064       }
11065       /**
11066        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11067        */
11068       public Builder setColumnValue(
11069           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
11070         if (columnValueBuilder_ == null) {
11071           ensureColumnValueIsMutable();
11072           columnValue_.set(index, builderForValue.build());
11073           onChanged();
11074         } else {
11075           columnValueBuilder_.setMessage(index, builderForValue.build());
11076         }
11077         return this;
11078       }
11079       /**
11080        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11081        */
11082       public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
11083         if (columnValueBuilder_ == null) {
11084           if (value == null) {
11085             throw new NullPointerException();
11086           }
11087           ensureColumnValueIsMutable();
11088           columnValue_.add(value);
11089           onChanged();
11090         } else {
11091           columnValueBuilder_.addMessage(value);
11092         }
11093         return this;
11094       }
11095       /**
11096        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11097        */
11098       public Builder addColumnValue(
11099           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
11100         if (columnValueBuilder_ == null) {
11101           if (value == null) {
11102             throw new NullPointerException();
11103           }
11104           ensureColumnValueIsMutable();
11105           columnValue_.add(index, value);
11106           onChanged();
11107         } else {
11108           columnValueBuilder_.addMessage(index, value);
11109         }
11110         return this;
11111       }
11112       /**
11113        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11114        */
11115       public Builder addColumnValue(
11116           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
11117         if (columnValueBuilder_ == null) {
11118           ensureColumnValueIsMutable();
11119           columnValue_.add(builderForValue.build());
11120           onChanged();
11121         } else {
11122           columnValueBuilder_.addMessage(builderForValue.build());
11123         }
11124         return this;
11125       }
11126       /**
11127        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11128        */
11129       public Builder addColumnValue(
11130           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
11131         if (columnValueBuilder_ == null) {
11132           ensureColumnValueIsMutable();
11133           columnValue_.add(index, builderForValue.build());
11134           onChanged();
11135         } else {
11136           columnValueBuilder_.addMessage(index, builderForValue.build());
11137         }
11138         return this;
11139       }
11140       /**
11141        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11142        */
11143       public Builder addAllColumnValue(
11144           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values) {
11145         if (columnValueBuilder_ == null) {
11146           ensureColumnValueIsMutable();
11147           super.addAll(values, columnValue_);
11148           onChanged();
11149         } else {
11150           columnValueBuilder_.addAllMessages(values);
11151         }
11152         return this;
11153       }
11154       /**
11155        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11156        */
11157       public Builder clearColumnValue() {
11158         if (columnValueBuilder_ == null) {
11159           columnValue_ = java.util.Collections.emptyList();
11160           bitField0_ = (bitField0_ & ~0x00000004);
11161           onChanged();
11162         } else {
11163           columnValueBuilder_.clear();
11164         }
11165         return this;
11166       }
11167       /**
11168        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11169        */
11170       public Builder removeColumnValue(int index) {
11171         if (columnValueBuilder_ == null) {
11172           ensureColumnValueIsMutable();
11173           columnValue_.remove(index);
11174           onChanged();
11175         } else {
11176           columnValueBuilder_.remove(index);
11177         }
11178         return this;
11179       }
11180       /**
11181        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11182        */
11183       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder(
11184           int index) {
11185         return getColumnValueFieldBuilder().getBuilder(index);
11186       }
11187       /**
11188        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11189        */
11190       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
11191           int index) {
11192         if (columnValueBuilder_ == null) {
11193           return columnValue_.get(index);  } else {
11194           return columnValueBuilder_.getMessageOrBuilder(index);
11195         }
11196       }
11197       /**
11198        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11199        */
11200       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
11201            getColumnValueOrBuilderList() {
11202         if (columnValueBuilder_ != null) {
11203           return columnValueBuilder_.getMessageOrBuilderList();
11204         } else {
11205           return java.util.Collections.unmodifiableList(columnValue_);
11206         }
11207       }
11208       /**
11209        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11210        */
11211       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() {
11212         return getColumnValueFieldBuilder().addBuilder(
11213             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
11214       }
11215       /**
11216        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11217        */
11218       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder(
11219           int index) {
11220         return getColumnValueFieldBuilder().addBuilder(
11221             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
11222       }
11223       /**
11224        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11225        */
11226       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder> 
11227            getColumnValueBuilderList() {
11228         return getColumnValueFieldBuilder().getBuilderList();
11229       }
11230       private com.google.protobuf.RepeatedFieldBuilder<
11231           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> 
11232           getColumnValueFieldBuilder() {
11233         if (columnValueBuilder_ == null) {
11234           columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
11235               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>(
11236                   columnValue_,
11237                   ((bitField0_ & 0x00000004) == 0x00000004),
11238                   getParentForChildren(),
11239                   isClean());
11240           columnValue_ = null;
11241         }
11242         return columnValueBuilder_;
11243       }
11244 
11245       // optional uint64 timestamp = 4;
11246       private long timestamp_ ;
11247       /**
11248        * <code>optional uint64 timestamp = 4;</code>
11249        */
11250       public boolean hasTimestamp() {
11251         return ((bitField0_ & 0x00000008) == 0x00000008);
11252       }
11253       /**
11254        * <code>optional uint64 timestamp = 4;</code>
11255        */
11256       public long getTimestamp() {
11257         return timestamp_;
11258       }
11259       /**
11260        * <code>optional uint64 timestamp = 4;</code>
11261        */
11262       public Builder setTimestamp(long value) {
11263         bitField0_ |= 0x00000008;
11264         timestamp_ = value;
11265         onChanged();
11266         return this;
11267       }
11268       /**
11269        * <code>optional uint64 timestamp = 4;</code>
11270        */
11271       public Builder clearTimestamp() {
11272         bitField0_ = (bitField0_ & ~0x00000008);
11273         timestamp_ = 0L;
11274         onChanged();
11275         return this;
11276       }
11277 
      // repeated .NameBytesPair attribute = 5;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
        java.util.Collections.emptyList();
      // Copy-on-write helper: the first mutation replaces the (possibly
      // shared or immutable) list with a private ArrayList and sets the bit
      // that records ownership.
      private void ensureAttributeIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
          bitField0_ |= 0x00000010;
         }
      }

      // Lazily-created nested builder; once it exists it owns the field state.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
11291       /**
11292        * <code>repeated .NameBytesPair attribute = 5;</code>
11293        */
11294       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
11295         if (attributeBuilder_ == null) {
11296           return java.util.Collections.unmodifiableList(attribute_);
11297         } else {
11298           return attributeBuilder_.getMessageList();
11299         }
11300       }
11301       /**
11302        * <code>repeated .NameBytesPair attribute = 5;</code>
11303        */
11304       public int getAttributeCount() {
11305         if (attributeBuilder_ == null) {
11306           return attribute_.size();
11307         } else {
11308           return attributeBuilder_.getCount();
11309         }
11310       }
11311       /**
11312        * <code>repeated .NameBytesPair attribute = 5;</code>
11313        */
11314       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
11315         if (attributeBuilder_ == null) {
11316           return attribute_.get(index);
11317         } else {
11318           return attributeBuilder_.getMessage(index);
11319         }
11320       }
11321       /**
11322        * <code>repeated .NameBytesPair attribute = 5;</code>
11323        */
11324       public Builder setAttribute(
11325           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11326         if (attributeBuilder_ == null) {
11327           if (value == null) {
11328             throw new NullPointerException();
11329           }
11330           ensureAttributeIsMutable();
11331           attribute_.set(index, value);
11332           onChanged();
11333         } else {
11334           attributeBuilder_.setMessage(index, value);
11335         }
11336         return this;
11337       }
11338       /**
11339        * <code>repeated .NameBytesPair attribute = 5;</code>
11340        */
11341       public Builder setAttribute(
11342           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11343         if (attributeBuilder_ == null) {
11344           ensureAttributeIsMutable();
11345           attribute_.set(index, builderForValue.build());
11346           onChanged();
11347         } else {
11348           attributeBuilder_.setMessage(index, builderForValue.build());
11349         }
11350         return this;
11351       }
11352       /**
11353        * <code>repeated .NameBytesPair attribute = 5;</code>
11354        */
11355       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11356         if (attributeBuilder_ == null) {
11357           if (value == null) {
11358             throw new NullPointerException();
11359           }
11360           ensureAttributeIsMutable();
11361           attribute_.add(value);
11362           onChanged();
11363         } else {
11364           attributeBuilder_.addMessage(value);
11365         }
11366         return this;
11367       }
11368       /**
11369        * <code>repeated .NameBytesPair attribute = 5;</code>
11370        */
11371       public Builder addAttribute(
11372           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11373         if (attributeBuilder_ == null) {
11374           if (value == null) {
11375             throw new NullPointerException();
11376           }
11377           ensureAttributeIsMutable();
11378           attribute_.add(index, value);
11379           onChanged();
11380         } else {
11381           attributeBuilder_.addMessage(index, value);
11382         }
11383         return this;
11384       }
11385       /**
11386        * <code>repeated .NameBytesPair attribute = 5;</code>
11387        */
11388       public Builder addAttribute(
11389           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11390         if (attributeBuilder_ == null) {
11391           ensureAttributeIsMutable();
11392           attribute_.add(builderForValue.build());
11393           onChanged();
11394         } else {
11395           attributeBuilder_.addMessage(builderForValue.build());
11396         }
11397         return this;
11398       }
11399       /**
11400        * <code>repeated .NameBytesPair attribute = 5;</code>
11401        */
11402       public Builder addAttribute(
11403           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11404         if (attributeBuilder_ == null) {
11405           ensureAttributeIsMutable();
11406           attribute_.add(index, builderForValue.build());
11407           onChanged();
11408         } else {
11409           attributeBuilder_.addMessage(index, builderForValue.build());
11410         }
11411         return this;
11412       }
11413       /**
11414        * <code>repeated .NameBytesPair attribute = 5;</code>
11415        */
11416       public Builder addAllAttribute(
11417           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
11418         if (attributeBuilder_ == null) {
11419           ensureAttributeIsMutable();
11420           super.addAll(values, attribute_);
11421           onChanged();
11422         } else {
11423           attributeBuilder_.addAllMessages(values);
11424         }
11425         return this;
11426       }
11427       /**
11428        * <code>repeated .NameBytesPair attribute = 5;</code>
11429        */
11430       public Builder clearAttribute() {
11431         if (attributeBuilder_ == null) {
11432           attribute_ = java.util.Collections.emptyList();
11433           bitField0_ = (bitField0_ & ~0x00000010);
11434           onChanged();
11435         } else {
11436           attributeBuilder_.clear();
11437         }
11438         return this;
11439       }
11440       /**
11441        * <code>repeated .NameBytesPair attribute = 5;</code>
11442        */
11443       public Builder removeAttribute(int index) {
11444         if (attributeBuilder_ == null) {
11445           ensureAttributeIsMutable();
11446           attribute_.remove(index);
11447           onChanged();
11448         } else {
11449           attributeBuilder_.remove(index);
11450         }
11451         return this;
11452       }
11453       /**
11454        * <code>repeated .NameBytesPair attribute = 5;</code>
11455        */
11456       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
11457           int index) {
11458         return getAttributeFieldBuilder().getBuilder(index);
11459       }
11460       /**
11461        * <code>repeated .NameBytesPair attribute = 5;</code>
11462        */
11463       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
11464           int index) {
11465         if (attributeBuilder_ == null) {
11466           return attribute_.get(index);  } else {
11467           return attributeBuilder_.getMessageOrBuilder(index);
11468         }
11469       }
11470       /**
11471        * <code>repeated .NameBytesPair attribute = 5;</code>
11472        */
11473       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
11474            getAttributeOrBuilderList() {
11475         if (attributeBuilder_ != null) {
11476           return attributeBuilder_.getMessageOrBuilderList();
11477         } else {
11478           return java.util.Collections.unmodifiableList(attribute_);
11479         }
11480       }
11481       /**
11482        * <code>repeated .NameBytesPair attribute = 5;</code>
11483        */
11484       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
11485         return getAttributeFieldBuilder().addBuilder(
11486             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
11487       }
11488       /**
11489        * <code>repeated .NameBytesPair attribute = 5;</code>
11490        */
11491       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
11492           int index) {
11493         return getAttributeFieldBuilder().addBuilder(
11494             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
11495       }
11496       /**
11497        * <code>repeated .NameBytesPair attribute = 5;</code>
11498        */
11499       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> 
11500            getAttributeBuilderList() {
11501         return getAttributeFieldBuilder().getBuilderList();
11502       }
      // Lazily-created helper that manages nested builders for the repeated
      // 'attribute' field. Once created, attribute_ is handed off to it and
      // nulled out; all subsequent access goes through attributeBuilder_.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
          getAttributeFieldBuilder() {
        if (attributeBuilder_ == null) {
          attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
                  attribute_,
                  // The boolean tells the field builder whether the list is mutable
                  // (bit 0x00000010 of bitField0_ set means we own a mutable copy).
                  ((bitField0_ & 0x00000010) == 0x00000010),
                  getParentForChildren(),
                  isClean());
          attribute_ = null;
        }
        return attributeBuilder_;
      }
11517 
      // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];
      // Presence is tracked by bit 0x00000020 of bitField0_.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       *
       * @return true if durability was explicitly set on this builder.
       */
      public boolean hasDurability() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       *
       * @return the current durability (USE_DEFAULT when unset).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
        return durability_;
      }
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       *
       * @throws NullPointerException if {@code value} is null (proto2 enums are non-null).
       */
      public Builder setDurability(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        durability_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       *
       * Resets durability to its proto default and clears the presence bit.
       */
      public Builder clearDurability() {
        bitField0_ = (bitField0_ & ~0x00000020);
        durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
        onChanged();
        return this;
      }
11553 
      // optional .TimeRange time_range = 7;
      // Presence is tracked by bit 0x00000040 of bitField0_. Like the repeated
      // field above, access flips between a plain message (timeRange_) and a
      // lazily-created SingleFieldBuilder (timeRangeBuilder_).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       */
      public boolean hasTimeRange() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
        if (timeRangeBuilder_ == null) {
          return timeRange_;
        } else {
          return timeRangeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       *
       * @throws NullPointerException if {@code value} is null.
       */
      public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
        if (timeRangeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          timeRange_ = value;
          onChanged();
        } else {
          timeRangeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       *
       * Sets the field from a builder, finalizing it with build().
       */
      public Builder setTimeRange(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
        if (timeRangeBuilder_ == null) {
          timeRange_ = builderForValue.build();
          onChanged();
        } else {
          timeRangeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       *
       * Merges {@code value} into the existing message when one was already set;
       * otherwise adopts {@code value} outright (standard proto2 merge semantics).
       */
      public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
        if (timeRangeBuilder_ == null) {
          // Only field-merge when a non-default value is already present;
          // identity compare with the default instance is intentional here.
          if (((bitField0_ & 0x00000040) == 0x00000040) &&
              timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
            timeRange_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
          } else {
            timeRange_ = value;
          }
          onChanged();
        } else {
          timeRangeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       *
       * Resets the field to its default instance and clears the presence bit.
       */
      public Builder clearTimeRange() {
        if (timeRangeBuilder_ == null) {
          timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
          onChanged();
        } else {
          timeRangeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       *
       * Marks the field present and returns a mutable nested builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getTimeRangeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
        if (timeRangeBuilder_ != null) {
          return timeRangeBuilder_.getMessageOrBuilder();
        } else {
          return timeRange_;
        }
      }
      /**
       * <code>optional .TimeRange time_range = 7;</code>
       *
       * <pre>
       * For some mutations, a result may be returned, in which case,
       * time range can be specified for potential performance gain
       * </pre>
       *
       * Lazily creates the SingleFieldBuilder; after creation timeRange_ is
       * nulled and all access is routed through the builder.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> 
          getTimeRangeFieldBuilder() {
        if (timeRangeBuilder_ == null) {
          timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
                  timeRange_,
                  getParentForChildren(),
                  isClean());
          timeRange_ = null;
        }
        return timeRangeBuilder_;
      }
11715 
      // optional int32 associated_cell_count = 8;
      // Presence is tracked by bit 0x00000080 of bitField0_.
      private int associatedCellCount_ ;
      /**
       * <code>optional int32 associated_cell_count = 8;</code>
       *
       * <pre>
       * The below count is set when the associated cells are NOT
       * part of this protobuf message; they are passed alongside
       * and then this Message is a placeholder with metadata.  The
       * count is needed to know how many to peel off the block of Cells as
       * ours.  NOTE: This is different from the pb managed cell_count of the
       * 'cell' field above which is non-null when the cells are pb'd.
       * </pre>
       */
      public boolean hasAssociatedCellCount() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional int32 associated_cell_count = 8;</code>
       *
       * <pre>
       * The below count is set when the associated cells are NOT
       * part of this protobuf message; they are passed alongside
       * and then this Message is a placeholder with metadata.  The
       * count is needed to know how many to peel off the block of Cells as
       * ours.  NOTE: This is different from the pb managed cell_count of the
       * 'cell' field above which is non-null when the cells are pb'd.
       * </pre>
       */
      public int getAssociatedCellCount() {
        return associatedCellCount_;
      }
      /**
       * <code>optional int32 associated_cell_count = 8;</code>
       *
       * <pre>
       * The below count is set when the associated cells are NOT
       * part of this protobuf message; they are passed alongside
       * and then this Message is a placeholder with metadata.  The
       * count is needed to know how many to peel off the block of Cells as
       * ours.  NOTE: This is different from the pb managed cell_count of the
       * 'cell' field above which is non-null when the cells are pb'd.
       * </pre>
       */
      public Builder setAssociatedCellCount(int value) {
        bitField0_ |= 0x00000080;
        associatedCellCount_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 associated_cell_count = 8;</code>
       *
       * <pre>
       * The below count is set when the associated cells are NOT
       * part of this protobuf message; they are passed alongside
       * and then this Message is a placeholder with metadata.  The
       * count is needed to know how many to peel off the block of Cells as
       * ours.  NOTE: This is different from the pb managed cell_count of the
       * 'cell' field above which is non-null when the cells are pb'd.
       * </pre>
       */
      public Builder clearAssociatedCellCount() {
        bitField0_ = (bitField0_ & ~0x00000080);
        associatedCellCount_ = 0;
        onChanged();
        return this;
      }
11784 
      // optional uint64 nonce = 9;
      // Presence is tracked by bit 0x00000100 of bitField0_.
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 9;</code>
       *
       * @return true if nonce was explicitly set.
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional uint64 nonce = 9;</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 9;</code>
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000100;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 9;</code>
       *
       * Resets nonce to 0 and clears the presence bit.
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000100);
        nonce_ = 0L;
        onChanged();
        return this;
      }
11817 
11818       // @@protoc_insertion_point(builder_scope:MutationProto)
11819     }
11820 
    // Class initializer: builds the singleton default instance with the
    // no-init constructor, then populates its fields with proto defaults.
    static {
      defaultInstance = new MutationProto(true);
      defaultInstance.initFields();
    }
11825 
11826     // @@protoc_insertion_point(class_scope:MutationProto)
11827   }
11828 
  /**
   * Read-only accessor interface for {@code MutateRequest}, implemented by both
   * the immutable message and its Builder.
   * NOTE(review): generated by protoc — do not hand-edit; regenerate from Client.proto.
   */
  public interface MutateRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // required .MutationProto mutation = 2;
    /**
     * <code>required .MutationProto mutation = 2;</code>
     */
    boolean hasMutation();
    /**
     * <code>required .MutationProto mutation = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
    /**
     * <code>required .MutationProto mutation = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();

    // optional .Condition condition = 3;
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    boolean hasCondition();
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();

    // optional uint64 nonce_group = 4;
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    long getNonceGroup();
  }
11884   /**
11885    * Protobuf type {@code MutateRequest}
11886    *
11887    * <pre>
11888    **
11889    * The mutate request. Perform a single Mutate operation.
11890    *
11891    * Optionally, you can specify a condition. The mutate
11892    * will take place only if the condition is met.  Otherwise,
11893    * the mutate will be ignored.  In the response result,
11894    * parameter processed is used to indicate if the mutate
11895    * actually happened.
11896    * </pre>
11897    */
11898   public static final class MutateRequest extends
11899       com.google.protobuf.GeneratedMessage
11900       implements MutateRequestOrBuilder {
    // Use MutateRequest.newBuilder() to construct.
    // Builder-driven constructor: adopts the builder's unknown fields.
    private MutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the default instance (fields set via initFields()).
    private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11907 
    // Singleton default instance, created in this class's static initializer.
    private static final MutateRequest defaultInstance;
    public static MutateRequest getDefaultInstance() {
      return defaultInstance;
    }

    public MutateRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
11916 
    // Fields seen on the wire that this message version does not recognize;
    // preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until tag 0 (EOF)
    // or an unparseable field. Note the switch dispatches on the full tag
    // (field number << 3 | wire type): 10/18/26 are length-delimited messages
    // for fields 1-3, 32 is a varint for field 4. Case order (default before
    // the numbered cases) is irrelevant to Java switch dispatch.
    private MutateRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (region): if already seen, merge the duplicate into the
              // previous value per proto2 last-message-merges semantics.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (mutation), same duplicate-merge pattern.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = mutation_.toBuilder();
              }
              mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(mutation_);
                mutation_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {
              // Field 3 (condition), same duplicate-merge pattern.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = condition_.toBuilder();
              }
              condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(condition_);
                condition_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 32: {
              // Field 4 (nonce_group), uint64 varint.
              bitField0_ |= 0x00000008;
              nonceGroup_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on failure, so the
        // partially-built message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table for MutateRequest,
    // both defined at the ClientProtos outer-class level.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
    }
12013 
    // Stateless parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<MutateRequest> PARSER =
        new com.google.protobuf.AbstractParser<MutateRequest>() {
      public MutateRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MutateRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MutateRequest> getParserForType() {
      return PARSER;
    }
12028 
    // Presence bitmap for optional/required fields:
    // 0x01=region, 0x02=mutation, 0x04=condition, 0x08=nonce_group.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // required .MutationProto mutation = 2;
    public static final int MUTATION_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
    /**
     * <code>required .MutationProto mutation = 2;</code>
     */
    public boolean hasMutation() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .MutationProto mutation = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
      return mutation_;
    }
    /**
     * <code>required .MutationProto mutation = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
      return mutation_;
    }

    // optional .Condition condition = 3;
    public static final int CONDITION_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    public boolean hasCondition() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
      return condition_;
    }
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
      return condition_;
    }

    // optional uint64 nonce_group = 4;
    public static final int NONCE_GROUP_FIELD_NUMBER = 4;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce_group = 4;</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }
12111 
    // Populates every field with its proto2 default value; called by the
    // parsing constructor and by the static initializer of the default instance.
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
      condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
      nonceGroup_ = 0L;
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * A MutateRequest is initialized when both required fields (region,
     * mutation) are present and initialized, and condition — if present —
     * is itself initialized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasMutation()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getMutation().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasCondition()) {
        if (!getCondition().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
12148 
    /**
     * Serializes only the fields whose presence bit is set, in field-number
     * order, then appends preserved unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of memoizing per-field sizes before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, mutation_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, condition_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, nonceGroup_);
      }
      getUnknownFields().writeTo(output);
    }
12166 
12167     private int memoizedSerializedSize = -1; // -1 = not yet computed; safe to cache, message is immutable
// Computes the wire size in bytes: tag+payload for each present field plus any
// unknown fields. Must mirror writeTo() exactly, field for field.
12168     public int getSerializedSize() {
12169       int size = memoizedSerializedSize;
12170       if (size != -1) return size;
12171 
12172       size = 0;
12173       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12174         size += com.google.protobuf.CodedOutputStream
12175           .computeMessageSize(1, region_);
12176       }
12177       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12178         size += com.google.protobuf.CodedOutputStream
12179           .computeMessageSize(2, mutation_);
12180       }
12181       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12182         size += com.google.protobuf.CodedOutputStream
12183           .computeMessageSize(3, condition_);
12184       }
12185       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12186         size += com.google.protobuf.CodedOutputStream
12187           .computeUInt64Size(4, nonceGroup_);
12188       }
12189       size += getUnknownFields().getSerializedSize();
12190       memoizedSerializedSize = size;
12191       return size;
12192     }
12193 
12194     private static final long serialVersionUID = 0L;
// Java-serialization hook: delegates to GeneratedMessage.writeReplace(), which
// substitutes a proxy so the message is serialized via its protobuf bytes.
12195     @java.lang.Override
12196     protected java.lang.Object writeReplace()
12197         throws java.io.ObjectStreamException {
12198       return super.writeReplace();
12199     }
12200 
// Structural equality: for each field, presence flags must match and — when
// present — values must be equal; unknown fields must match as well.
12201     @java.lang.Override
12202     public boolean equals(final java.lang.Object obj) {
12203       if (obj == this) {
12204        return true;
12205       }
12206       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) {
12207         return super.equals(obj); // non-MutateRequest: fall back to reference equality
12208       }
12209       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj;
12210 
12211       boolean result = true;
12212       result = result && (hasRegion() == other.hasRegion());
12213       if (hasRegion()) {
12214         result = result && getRegion()
12215             .equals(other.getRegion());
12216       }
12217       result = result && (hasMutation() == other.hasMutation());
12218       if (hasMutation()) {
12219         result = result && getMutation()
12220             .equals(other.getMutation());
12221       }
12222       result = result && (hasCondition() == other.hasCondition());
12223       if (hasCondition()) {
12224         result = result && getCondition()
12225             .equals(other.getCondition());
12226       }
12227       result = result && (hasNonceGroup() == other.hasNonceGroup());
12228       if (hasNonceGroup()) {
12229         result = result && (getNonceGroup()
12230             == other.getNonceGroup());
12231       }
12232       result = result &&
12233           getUnknownFields().equals(other.getUnknownFields());
12234       return result;
12235     }
12236 
12237     private int memoizedHashCode = 0; // 0 = not yet computed (a computed hash of 0 would be recomputed — harmless)
// Hash consistent with equals(): folds in each present field tagged by its
// field number, plus descriptor and unknown fields. Memoized (immutable message).
12238     @java.lang.Override
12239     public int hashCode() {
12240       if (memoizedHashCode != 0) {
12241         return memoizedHashCode;
12242       }
12243       int hash = 41;
12244       hash = (19 * hash) + getDescriptorForType().hashCode();
12245       if (hasRegion()) {
12246         hash = (37 * hash) + REGION_FIELD_NUMBER;
12247         hash = (53 * hash) + getRegion().hashCode();
12248       }
12249       if (hasMutation()) {
12250         hash = (37 * hash) + MUTATION_FIELD_NUMBER;
12251         hash = (53 * hash) + getMutation().hashCode();
12252       }
12253       if (hasCondition()) {
12254         hash = (37 * hash) + CONDITION_FIELD_NUMBER;
12255         hash = (53 * hash) + getCondition().hashCode();
12256       }
12257       if (hasNonceGroup()) {
12258         hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
12259         hash = (53 * hash) + hashLong(getNonceGroup()); // 64-bit value folded to 32 bits
12260       }
12261       hash = (29 * hash) + getUnknownFields().hashCode();
12262       memoizedHashCode = hash;
12263       return hash;
12264     }
12265 
// --- Static parse entry points -------------------------------------------
// All overloads delegate to the shared PARSER instance; variants differ only
// in input source (ByteString / byte[] / InputStream / CodedInputStream),
// optional ExtensionRegistryLite, and delimited vs. raw framing.
12266     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12267         com.google.protobuf.ByteString data)
12268         throws com.google.protobuf.InvalidProtocolBufferException {
12269       return PARSER.parseFrom(data);
12270     }
12271     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12272         com.google.protobuf.ByteString data,
12273         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12274         throws com.google.protobuf.InvalidProtocolBufferException {
12275       return PARSER.parseFrom(data, extensionRegistry);
12276     }
12277     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data)
12278         throws com.google.protobuf.InvalidProtocolBufferException {
12279       return PARSER.parseFrom(data);
12280     }
12281     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12282         byte[] data,
12283         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12284         throws com.google.protobuf.InvalidProtocolBufferException {
12285       return PARSER.parseFrom(data, extensionRegistry);
12286     }
12287     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input)
12288         throws java.io.IOException {
12289       return PARSER.parseFrom(input);
12290     }
12291     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12292         java.io.InputStream input,
12293         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12294         throws java.io.IOException {
12295       return PARSER.parseFrom(input, extensionRegistry);
12296     }
// parseDelimitedFrom expects a varint length prefix before the message bytes.
12297     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input)
12298         throws java.io.IOException {
12299       return PARSER.parseDelimitedFrom(input);
12300     }
12301     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(
12302         java.io.InputStream input,
12303         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12304         throws java.io.IOException {
12305       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12306     }
12307     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12308         com.google.protobuf.CodedInputStream input)
12309         throws java.io.IOException {
12310       return PARSER.parseFrom(input);
12311     }
12312     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12313         com.google.protobuf.CodedInputStream input,
12314         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12315         throws java.io.IOException {
12316       return PARSER.parseFrom(input, extensionRegistry);
12317     }
12318 
// --- Builder factories ---------------------------------------------------
// newBuilder(): fresh empty builder; newBuilder(prototype)/toBuilder(): builder
// pre-populated from an existing message via mergeFrom.
12319     public static Builder newBuilder() { return Builder.create(); }
12320     public Builder newBuilderForType() { return newBuilder(); }
12321     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) {
12322       return newBuilder().mergeFrom(prototype);
12323     }
12324     public Builder toBuilder() { return newBuilder(this); }
12325 
// Framework hook: creates a builder wired to a parent for change notification.
12326     @java.lang.Override
12327     protected Builder newBuilderForType(
12328         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12329       Builder builder = new Builder(parent);
12330       return builder;
12331     }
12332     /**
12333      * Protobuf type {@code MutateRequest}
12334      *
12335      * <pre>
12336      **
12337      * The mutate request. Perform a single Mutate operation.
12338      *
12339      * Optionally, you can specify a condition. The mutate
12340      * will take place only if the condition is met.  Otherwise,
12341      * the mutate will be ignored.  In the response result,
12342      * parameter processed is used to indicate if the mutate
12343      * actually happened.
12344      * </pre>
12345      */
12346     public static final class Builder extends
12347         com.google.protobuf.GeneratedMessage.Builder<Builder>
12348        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder {
// Descriptor for the MutateRequest message type (shared with the message class).
12349       public static final com.google.protobuf.Descriptors.Descriptor
12350           getDescriptor() {
12351         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor;
12352       }
12353 
// Maps descriptor fields to the generated accessors for reflective access.
12354       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12355           internalGetFieldAccessorTable() {
12356         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable
12357             .ensureFieldAccessorsInitialized(
12358                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
12359       }
12360 
12361       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder()
12362       private Builder() {
12363         maybeForceBuilderInitialization();
12364       }
12365 
// Constructor used by newBuilderForType(parent): propagates change events upward.
12366       private Builder(
12367           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12368         super(parent);
12369         maybeForceBuilderInitialization();
12370       }
// Eagerly creates nested-message field builders when the runtime requires it
// (alwaysUseFieldBuilders is true in descriptor-based, non-lite runtimes).
12371       private void maybeForceBuilderInitialization() {
12372         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12373           getRegionFieldBuilder();
12374           getMutationFieldBuilder();
12375           getConditionFieldBuilder();
12376         }
12377       }
12378       private static Builder create() {
12379         return new Builder();
12380       }
12381 
// Resets every field to its default and clears all presence bits. Each message
// field is reset either directly or through its SingleFieldBuilder, whichever
// is currently active.
12382       public Builder clear() {
12383         super.clear();
12384         if (regionBuilder_ == null) {
12385           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12386         } else {
12387           regionBuilder_.clear();
12388         }
12389         bitField0_ = (bitField0_ & ~0x00000001);
12390         if (mutationBuilder_ == null) {
12391           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12392         } else {
12393           mutationBuilder_.clear();
12394         }
12395         bitField0_ = (bitField0_ & ~0x00000002);
12396         if (conditionBuilder_ == null) {
12397           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12398         } else {
12399           conditionBuilder_.clear();
12400         }
12401         bitField0_ = (bitField0_ & ~0x00000004);
12402         nonceGroup_ = 0L;
12403         bitField0_ = (bitField0_ & ~0x00000008);
12404         return this;
12405       }
12406 
// Deep-ish copy: snapshots current state via buildPartial() and merges it into
// a fresh builder, so the clone does not share mutable builder state.
12407       public Builder clone() {
12408         return create().mergeFrom(buildPartial());
12409       }
12410 
12411       public com.google.protobuf.Descriptors.Descriptor
12412           getDescriptorForType() {
12413         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor;
12414       }
12415 
12416       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() {
12417         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
12418       }
12419 
// Builds and verifies the message; throws UninitializedMessageException if a
// required field (region, mutation) is missing. Use buildPartial() to skip the check.
12420       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() {
12421         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial();
12422         if (!result.isInitialized()) {
12423           throw newUninitializedMessageException(result);
12424         }
12425         return result;
12426       }
12427 
// Copies builder state into a new immutable message without checking required
// fields. Presence bits are transferred one by one; each message field is taken
// from the raw field or from its SingleFieldBuilder, whichever is active.
12428       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() {
12429         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this);
12430         int from_bitField0_ = bitField0_;
12431         int to_bitField0_ = 0;
12432         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
12433           to_bitField0_ |= 0x00000001;
12434         }
12435         if (regionBuilder_ == null) {
12436           result.region_ = region_;
12437         } else {
12438           result.region_ = regionBuilder_.build();
12439         }
12440         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
12441           to_bitField0_ |= 0x00000002;
12442         }
12443         if (mutationBuilder_ == null) {
12444           result.mutation_ = mutation_;
12445         } else {
12446           result.mutation_ = mutationBuilder_.build();
12447         }
12448         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
12449           to_bitField0_ |= 0x00000004;
12450         }
12451         if (conditionBuilder_ == null) {
12452           result.condition_ = condition_;
12453         } else {
12454           result.condition_ = conditionBuilder_.build();
12455         }
12456         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
12457           to_bitField0_ |= 0x00000008;
12458         }
12459         result.nonceGroup_ = nonceGroup_;
12460         result.bitField0_ = to_bitField0_;
12461         onBuilt();
12462         return result;
12463       }
12464 
// Generic merge entry point: dispatches to the typed overload when possible,
// otherwise falls back to reflective field-by-field merging.
12465       public Builder mergeFrom(com.google.protobuf.Message other) {
12466         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) {
12467           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other);
12468         } else {
12469           super.mergeFrom(other);
12470           return this;
12471         }
12472       }
12473 
// Typed merge: copies only the fields that are present on `other`; message
// fields are merged recursively, scalar nonce_group is overwritten.
12474       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) {
12475         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; // merging the default is a no-op
12476         if (other.hasRegion()) {
12477           mergeRegion(other.getRegion());
12478         }
12479         if (other.hasMutation()) {
12480           mergeMutation(other.getMutation());
12481         }
12482         if (other.hasCondition()) {
12483           mergeCondition(other.getCondition());
12484         }
12485         if (other.hasNonceGroup()) {
12486           setNonceGroup(other.getNonceGroup());
12487         }
12488         this.mergeUnknownFields(other.getUnknownFields());
12489         return this;
12490       }
12491 
// Same checks as MutateRequest.isInitialized(), but unmemoized because the
// builder is mutable. (The empty lines are where protoc would emit debug output.)
12492       public final boolean isInitialized() {
12493         if (!hasRegion()) {
12494           
12495           return false;
12496         }
12497         if (!hasMutation()) {
12498           
12499           return false;
12500         }
12501         if (!getRegion().isInitialized()) {
12502           
12503           return false;
12504         }
12505         if (!getMutation().isInitialized()) {
12506           
12507           return false;
12508         }
12509         if (hasCondition()) {
12510           if (!getCondition().isInitialized()) {
12511             
12512             return false;
12513           }
12514         }
12515         return true;
12516       }
12517 
// Parses a MutateRequest from the stream and merges it into this builder.
// On InvalidProtocolBufferException, the partially parsed message (attached to
// the exception) is still merged in the finally block before rethrowing, so no
// successfully read fields are lost.
12518       public Builder mergeFrom(
12519           com.google.protobuf.CodedInputStream input,
12520           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12521           throws java.io.IOException {
12522         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null;
12523         try {
12524           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12525         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12526           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage();
12527           throw e;
12528         } finally {
12529           if (parsedMessage != null) {
12530             mergeFrom(parsedMessage);
12531           }
12532         }
12533         return this;
12534       }
12535       private int bitField0_;
12536 
12537       // required .RegionSpecifier region = 1;
// Standard protoc accessor cluster for a singular message field. Invariant:
// exactly one of {region_, regionBuilder_} is authoritative — regionBuilder_
// takes over (and region_ is nulled) once getRegionFieldBuilder() is called.
// Presence is tracked in bit 0 of bitField0_.
12538       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12539       private com.google.protobuf.SingleFieldBuilder<
12540           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
12541       /**
12542        * <code>required .RegionSpecifier region = 1;</code>
12543        */
12544       public boolean hasRegion() {
12545         return ((bitField0_ & 0x00000001) == 0x00000001);
12546       }
12547       /**
12548        * <code>required .RegionSpecifier region = 1;</code>
12549        */
12550       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
12551         if (regionBuilder_ == null) {
12552           return region_;
12553         } else {
12554           return regionBuilder_.getMessage();
12555         }
12556       }
12557       /**
12558        * <code>required .RegionSpecifier region = 1;</code>
12559        */
12560       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
12561         if (regionBuilder_ == null) {
12562           if (value == null) {
12563             throw new NullPointerException();
12564           }
12565           region_ = value;
12566           onChanged();
12567         } else {
12568           regionBuilder_.setMessage(value);
12569         }
12570         bitField0_ |= 0x00000001;
12571         return this;
12572       }
12573       /**
12574        * <code>required .RegionSpecifier region = 1;</code>
12575        */
12576       public Builder setRegion(
12577           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
12578         if (regionBuilder_ == null) {
12579           region_ = builderForValue.build();
12580           onChanged();
12581         } else {
12582           regionBuilder_.setMessage(builderForValue.build());
12583         }
12584         bitField0_ |= 0x00000001;
12585         return this;
12586       }
12587       /**
12588        * <code>required .RegionSpecifier region = 1;</code>
12589        */
12590       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
12591         if (regionBuilder_ == null) {
12592           if (((bitField0_ & 0x00000001) == 0x00000001) &&
12593               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
// field already holds a non-default value: merge into it rather than replace
12594             region_ =
12595               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
12596           } else {
12597             region_ = value;
12598           }
12599           onChanged();
12600         } else {
12601           regionBuilder_.mergeFrom(value);
12602         }
12603         bitField0_ |= 0x00000001;
12604         return this;
12605       }
12606       /**
12607        * <code>required .RegionSpecifier region = 1;</code>
12608        */
12609       public Builder clearRegion() {
12610         if (regionBuilder_ == null) {
12611           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12612           onChanged();
12613         } else {
12614           regionBuilder_.clear();
12615         }
12616         bitField0_ = (bitField0_ & ~0x00000001);
12617         return this;
12618       }
12619       /**
12620        * <code>required .RegionSpecifier region = 1;</code>
12621        */
12622       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
12623         bitField0_ |= 0x00000001;
12624         onChanged();
12625         return getRegionFieldBuilder().getBuilder();
12626       }
12627       /**
12628        * <code>required .RegionSpecifier region = 1;</code>
12629        */
12630       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
12631         if (regionBuilder_ != null) {
12632           return regionBuilder_.getMessageOrBuilder();
12633         } else {
12634           return region_;
12635         }
12636       }
12637       /**
12638        * <code>required .RegionSpecifier region = 1;</code>
12639        */
12640       private com.google.protobuf.SingleFieldBuilder<
12641           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
12642           getRegionFieldBuilder() {
12643         if (regionBuilder_ == null) {
12644           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12645               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
12646                   region_,
12647                   getParentForChildren(),
12648                   isClean());
12649           region_ = null;
12650         }
12651         return regionBuilder_;
12652       }
12653 
12654       // required .MutationProto mutation = 2;
// Accessor cluster for the required `mutation` message field; presence is
// bit 1 of bitField0_. Either mutation_ or mutationBuilder_ is authoritative,
// mirroring the `region` field pattern.
12655       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12656       private com.google.protobuf.SingleFieldBuilder<
12657           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
12658       /**
12659        * <code>required .MutationProto mutation = 2;</code>
12660        */
12661       public boolean hasMutation() {
12662         return ((bitField0_ & 0x00000002) == 0x00000002);
12663       }
12664       /**
12665        * <code>required .MutationProto mutation = 2;</code>
12666        */
12667       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
12668         if (mutationBuilder_ == null) {
12669           return mutation_;
12670         } else {
12671           return mutationBuilder_.getMessage();
12672         }
12673       }
12674       /**
12675        * <code>required .MutationProto mutation = 2;</code>
12676        */
12677       public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
12678         if (mutationBuilder_ == null) {
12679           if (value == null) {
12680             throw new NullPointerException();
12681           }
12682           mutation_ = value;
12683           onChanged();
12684         } else {
12685           mutationBuilder_.setMessage(value);
12686         }
12687         bitField0_ |= 0x00000002;
12688         return this;
12689       }
12690       /**
12691        * <code>required .MutationProto mutation = 2;</code>
12692        */
12693       public Builder setMutation(
12694           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
12695         if (mutationBuilder_ == null) {
12696           mutation_ = builderForValue.build();
12697           onChanged();
12698         } else {
12699           mutationBuilder_.setMessage(builderForValue.build());
12700         }
12701         bitField0_ |= 0x00000002;
12702         return this;
12703       }
12704       /**
12705        * <code>required .MutationProto mutation = 2;</code>
12706        */
12707       public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
12708         if (mutationBuilder_ == null) {
12709           if (((bitField0_ & 0x00000002) == 0x00000002) &&
12710               mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
// existing non-default value: merge field-by-field instead of overwriting
12711             mutation_ =
12712               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
12713           } else {
12714             mutation_ = value;
12715           }
12716           onChanged();
12717         } else {
12718           mutationBuilder_.mergeFrom(value);
12719         }
12720         bitField0_ |= 0x00000002;
12721         return this;
12722       }
12723       /**
12724        * <code>required .MutationProto mutation = 2;</code>
12725        */
12726       public Builder clearMutation() {
12727         if (mutationBuilder_ == null) {
12728           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12729           onChanged();
12730         } else {
12731           mutationBuilder_.clear();
12732         }
12733         bitField0_ = (bitField0_ & ~0x00000002);
12734         return this;
12735       }
12736       /**
12737        * <code>required .MutationProto mutation = 2;</code>
12738        */
12739       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
12740         bitField0_ |= 0x00000002;
12741         onChanged();
12742         return getMutationFieldBuilder().getBuilder();
12743       }
12744       /**
12745        * <code>required .MutationProto mutation = 2;</code>
12746        */
12747       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
12748         if (mutationBuilder_ != null) {
12749           return mutationBuilder_.getMessageOrBuilder();
12750         } else {
12751           return mutation_;
12752         }
12753       }
12754       /**
12755        * <code>required .MutationProto mutation = 2;</code>
12756        */
12757       private com.google.protobuf.SingleFieldBuilder<
12758           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> 
12759           getMutationFieldBuilder() {
12760         if (mutationBuilder_ == null) {
12761           mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12762               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
12763                   mutation_,
12764                   getParentForChildren(),
12765                   isClean());
12766           mutation_ = null;
12767         }
12768         return mutationBuilder_;
12769       }
12770 
12771       // optional .Condition condition = 3;
// Accessor cluster for the optional `condition` message field; presence is
// bit 2 of bitField0_. Same single-field-builder pattern as `region`/`mutation`.
12772       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12773       private com.google.protobuf.SingleFieldBuilder<
12774           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
12775       /**
12776        * <code>optional .Condition condition = 3;</code>
12777        */
12778       public boolean hasCondition() {
12779         return ((bitField0_ & 0x00000004) == 0x00000004);
12780       }
12781       /**
12782        * <code>optional .Condition condition = 3;</code>
12783        */
12784       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
12785         if (conditionBuilder_ == null) {
12786           return condition_;
12787         } else {
12788           return conditionBuilder_.getMessage();
12789         }
12790       }
12791       /**
12792        * <code>optional .Condition condition = 3;</code>
12793        */
12794       public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
12795         if (conditionBuilder_ == null) {
12796           if (value == null) {
12797             throw new NullPointerException();
12798           }
12799           condition_ = value;
12800           onChanged();
12801         } else {
12802           conditionBuilder_.setMessage(value);
12803         }
12804         bitField0_ |= 0x00000004;
12805         return this;
12806       }
12807       /**
12808        * <code>optional .Condition condition = 3;</code>
12809        */
12810       public Builder setCondition(
12811           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
12812         if (conditionBuilder_ == null) {
12813           condition_ = builderForValue.build();
12814           onChanged();
12815         } else {
12816           conditionBuilder_.setMessage(builderForValue.build());
12817         }
12818         bitField0_ |= 0x00000004;
12819         return this;
12820       }
12821       /**
12822        * <code>optional .Condition condition = 3;</code>
12823        */
12824       public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
12825         if (conditionBuilder_ == null) {
12826           if (((bitField0_ & 0x00000004) == 0x00000004) &&
12827               condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
// existing non-default value: merge field-by-field instead of overwriting
12828             condition_ =
12829               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
12830           } else {
12831             condition_ = value;
12832           }
12833           onChanged();
12834         } else {
12835           conditionBuilder_.mergeFrom(value);
12836         }
12837         bitField0_ |= 0x00000004;
12838         return this;
12839       }
12840       /**
12841        * <code>optional .Condition condition = 3;</code>
12842        */
12843       public Builder clearCondition() {
12844         if (conditionBuilder_ == null) {
12845           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12846           onChanged();
12847         } else {
12848           conditionBuilder_.clear();
12849         }
12850         bitField0_ = (bitField0_ & ~0x00000004);
12851         return this;
12852       }
12853       /**
12854        * <code>optional .Condition condition = 3;</code>
12855        */
12856       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
12857         bitField0_ |= 0x00000004;
12858         onChanged();
12859         return getConditionFieldBuilder().getBuilder();
12860       }
12861       /**
12862        * <code>optional .Condition condition = 3;</code>
12863        */
12864       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
12865         if (conditionBuilder_ != null) {
12866           return conditionBuilder_.getMessageOrBuilder();
12867         } else {
12868           return condition_;
12869         }
12870       }
12871       /**
12872        * <code>optional .Condition condition = 3;</code>
12873        */
12874       private com.google.protobuf.SingleFieldBuilder<
12875           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> 
12876           getConditionFieldBuilder() {
12877         if (conditionBuilder_ == null) {
12878           conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12879               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
12880                   condition_,
12881                   getParentForChildren(),
12882                   isClean());
12883               condition_ = null;
12884         }
12885         return conditionBuilder_;
12886       }
12887 
// NOTE(review): protoc-generated accessors for `optional uint64 nonce_group = 4`
// (source: Client.proto). Do not hand-edit; change the .proto and regenerate.
// Presence is tracked by bit 0x00000008 of bitField0_.
12888       // optional uint64 nonce_group = 4;
12889       private long nonceGroup_ ;
12890       /**
12891        * <code>optional uint64 nonce_group = 4;</code>
12892        */
12893       public boolean hasNonceGroup() {
12894         return ((bitField0_ & 0x00000008) == 0x00000008);
12895       }
12896       /**
12897        * <code>optional uint64 nonce_group = 4;</code>
12898        */
12899       public long getNonceGroup() {
12900         return nonceGroup_;
12901       }
12902       /**
12903        * <code>optional uint64 nonce_group = 4;</code>
12904        */
12905       public Builder setNonceGroup(long value) {
12906         bitField0_ |= 0x00000008;
12907         nonceGroup_ = value;
12908         onChanged();
12909         return this;
12910       }
12911       /**
12912        * <code>optional uint64 nonce_group = 4;</code>
12913        */
12914       public Builder clearNonceGroup() {
12915         bitField0_ = (bitField0_ & ~0x00000008);
12916         nonceGroup_ = 0L;
12917         onChanged();
12918         return this;
12919       }
12920 
12921       // @@protoc_insertion_point(builder_scope:MutateRequest)
12922     }
12923 
12924     static {
12925       defaultInstance = new MutateRequest(true);
12926       defaultInstance.initFields();
12927     }
12928 
12929     // @@protoc_insertion_point(class_scope:MutateRequest)
12930   }
12931 
// NOTE(review): protoc-generated read-only view interface, implemented by both
// MutateResponse and MutateResponse.Builder (source: Client.proto). Do not hand-edit.
12932   public interface MutateResponseOrBuilder
12933       extends com.google.protobuf.MessageOrBuilder {
12934 
12935     // optional .Result result = 1;
12936     /**
12937      * <code>optional .Result result = 1;</code>
12938      */
12939     boolean hasResult();
12940     /**
12941      * <code>optional .Result result = 1;</code>
12942      */
12943     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
12944     /**
12945      * <code>optional .Result result = 1;</code>
12946      */
12947     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
12948 
12949     // optional bool processed = 2;
12950     /**
12951      * <code>optional bool processed = 2;</code>
12952      *
12953      * <pre>
12954      * used for mutate to indicate processed only
12955      * </pre>
12956      */
12957     boolean hasProcessed();
12958     /**
12959      * <code>optional bool processed = 2;</code>
12960      *
12961      * <pre>
12962      * used for mutate to indicate processed only
12963      * </pre>
12964      */
12965     boolean getProcessed();
12966   }
// NOTE(review): protoc-generated message class (source: Client.proto). Do not
// hand-edit; change Client.proto and regenerate. Field presence is tracked in
// bitField0_ (result = 0x1, processed = 0x2).
12967   /**
12968    * Protobuf type {@code MutateResponse}
12969    */
12970   public static final class MutateResponse extends
12971       com.google.protobuf.GeneratedMessage
12972       implements MutateResponseOrBuilder {
12973     // Use MutateResponse.newBuilder() to construct.
12974     private MutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12975       super(builder);
12976       this.unknownFields = builder.getUnknownFields();
12977     }
12978     private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12979 
12980     private static final MutateResponse defaultInstance;
12981     public static MutateResponse getDefaultInstance() {
12982       return defaultInstance;
12983     }
12984 
12985     public MutateResponse getDefaultInstanceForType() {
12986       return defaultInstance;
12987     }
12988 
12989     private final com.google.protobuf.UnknownFieldSet unknownFields;
12990     @java.lang.Override
12991     public final com.google.protobuf.UnknownFieldSet
12992         getUnknownFields() {
12993       return this.unknownFields;
12994     }
        // Streaming-parse constructor used by PARSER. The `default:` label appears
        // before `case 10`/`case 16` in source order; Java switch dispatch is by label
        // value, so the named cases still match. Unknown fields are preserved: the
        // builder is committed in `finally`, even when parsing fails mid-stream.
12995     private MutateResponse(
12996         com.google.protobuf.CodedInputStream input,
12997         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12998         throws com.google.protobuf.InvalidProtocolBufferException {
12999       initFields();
13000       int mutable_bitField0_ = 0;
13001       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13002           com.google.protobuf.UnknownFieldSet.newBuilder();
13003       try {
13004         boolean done = false;
13005         while (!done) {
13006           int tag = input.readTag();
13007           switch (tag) {
13008             case 0:
13009               done = true;
13010               break;
13011             default: {
13012               if (!parseUnknownField(input, unknownFields,
13013                                      extensionRegistry, tag)) {
13014                 done = true;
13015               }
13016               break;
13017             }
13018             case 10: {
                // tag 10 = field 1 (result), wire type 2: merge into any value seen earlier.
13019               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
13020               if (((bitField0_ & 0x00000001) == 0x00000001)) {
13021                 subBuilder = result_.toBuilder();
13022               }
13023               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
13024               if (subBuilder != null) {
13025                 subBuilder.mergeFrom(result_);
13026                 result_ = subBuilder.buildPartial();
13027               }
13028               bitField0_ |= 0x00000001;
13029               break;
13030             }
13031             case 16: {
                // tag 16 = field 2 (processed), wire type 0 (varint/bool).
13032               bitField0_ |= 0x00000002;
13033               processed_ = input.readBool();
13034               break;
13035             }
13036           }
13037         }
13038       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13039         throw e.setUnfinishedMessage(this);
13040       } catch (java.io.IOException e) {
13041         throw new com.google.protobuf.InvalidProtocolBufferException(
13042             e.getMessage()).setUnfinishedMessage(this);
13043       } finally {
13044         this.unknownFields = unknownFields.build();
13045         makeExtensionsImmutable();
13046       }
13047     }
13048     public static final com.google.protobuf.Descriptors.Descriptor
13049         getDescriptor() {
13050       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
13051     }
13052 
13053     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13054         internalGetFieldAccessorTable() {
13055       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable
13056           .ensureFieldAccessorsInitialized(
13057               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
13058     }
13059 
13060     public static com.google.protobuf.Parser<MutateResponse> PARSER =
13061         new com.google.protobuf.AbstractParser<MutateResponse>() {
13062       public MutateResponse parsePartialFrom(
13063           com.google.protobuf.CodedInputStream input,
13064           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13065           throws com.google.protobuf.InvalidProtocolBufferException {
13066         return new MutateResponse(input, extensionRegistry);
13067       }
13068     };
13069 
13070     @java.lang.Override
13071     public com.google.protobuf.Parser<MutateResponse> getParserForType() {
13072       return PARSER;
13073     }
13074 
13075     private int bitField0_;
13076     // optional .Result result = 1;
13077     public static final int RESULT_FIELD_NUMBER = 1;
13078     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
13079     /**
13080      * <code>optional .Result result = 1;</code>
13081      */
13082     public boolean hasResult() {
13083       return ((bitField0_ & 0x00000001) == 0x00000001);
13084     }
13085     /**
13086      * <code>optional .Result result = 1;</code>
13087      */
13088     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
13089       return result_;
13090     }
13091     /**
13092      * <code>optional .Result result = 1;</code>
13093      */
13094     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
13095       return result_;
13096     }
13097 
13098     // optional bool processed = 2;
13099     public static final int PROCESSED_FIELD_NUMBER = 2;
13100     private boolean processed_;
13101     /**
13102      * <code>optional bool processed = 2;</code>
13103      *
13104      * <pre>
13105      * used for mutate to indicate processed only
13106      * </pre>
13107      */
13108     public boolean hasProcessed() {
13109       return ((bitField0_ & 0x00000002) == 0x00000002);
13110     }
13111     /**
13112      * <code>optional bool processed = 2;</code>
13113      *
13114      * <pre>
13115      * used for mutate to indicate processed only
13116      * </pre>
13117      */
13118     public boolean getProcessed() {
13119       return processed_;
13120     }
13121 
13122     private void initFields() {
13123       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13124       processed_ = false;
13125     }
      // memoizedIsInitialized: -1 = not yet computed, 1 = initialized. MutateResponse
      // declares no required fields, so isInitialized() always ends up true.
13126     private byte memoizedIsInitialized = -1;
13127     public final boolean isInitialized() {
13128       byte isInitialized = memoizedIsInitialized;
13129       if (isInitialized != -1) return isInitialized == 1;
13130 
13131       memoizedIsInitialized = 1;
13132       return true;
13133     }
13134 
13135     public void writeTo(com.google.protobuf.CodedOutputStream output)
13136                         throws java.io.IOException {
13137       getSerializedSize();
13138       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13139         output.writeMessage(1, result_);
13140       }
13141       if (((bitField0_ & 0x00000002) == 0x00000002)) {
13142         output.writeBool(2, processed_);
13143       }
13144       getUnknownFields().writeTo(output);
13145     }
13146 
13147     private int memoizedSerializedSize = -1;
13148     public int getSerializedSize() {
13149       int size = memoizedSerializedSize;
13150       if (size != -1) return size;
13151 
13152       size = 0;
13153       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13154         size += com.google.protobuf.CodedOutputStream
13155           .computeMessageSize(1, result_);
13156       }
13157       if (((bitField0_ & 0x00000002) == 0x00000002)) {
13158         size += com.google.protobuf.CodedOutputStream
13159           .computeBoolSize(2, processed_);
13160       }
13161       size += getUnknownFields().getSerializedSize();
13162       memoizedSerializedSize = size;
13163       return size;
13164     }
13165 
13166     private static final long serialVersionUID = 0L;
13167     @java.lang.Override
13168     protected java.lang.Object writeReplace()
13169         throws java.io.ObjectStreamException {
13170       return super.writeReplace();
13171     }
13172 
13173     @java.lang.Override
13174     public boolean equals(final java.lang.Object obj) {
13175       if (obj == this) {
13176        return true;
13177       }
13178       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) {
13179         return super.equals(obj);
13180       }
13181       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj;
13182 
13183       boolean result = true;
13184       result = result && (hasResult() == other.hasResult());
13185       if (hasResult()) {
13186         result = result && getResult()
13187             .equals(other.getResult());
13188       }
13189       result = result && (hasProcessed() == other.hasProcessed());
13190       if (hasProcessed()) {
13191         result = result && (getProcessed()
13192             == other.getProcessed());
13193       }
13194       result = result &&
13195           getUnknownFields().equals(other.getUnknownFields());
13196       return result;
13197     }
13198 
      // hashCode is memoized; 0 doubles as the "not yet computed" sentinel.
13199     private int memoizedHashCode = 0;
13200     @java.lang.Override
13201     public int hashCode() {
13202       if (memoizedHashCode != 0) {
13203         return memoizedHashCode;
13204       }
13205       int hash = 41;
13206       hash = (19 * hash) + getDescriptorForType().hashCode();
13207       if (hasResult()) {
13208         hash = (37 * hash) + RESULT_FIELD_NUMBER;
13209         hash = (53 * hash) + getResult().hashCode();
13210       }
13211       if (hasProcessed()) {
13212         hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
13213         hash = (53 * hash) + hashBoolean(getProcessed());
13214       }
13215       hash = (29 * hash) + getUnknownFields().hashCode();
13216       memoizedHashCode = hash;
13217       return hash;
13218     }
13219 
      // Static parse entry points; all delegate to PARSER.
13220     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13221         com.google.protobuf.ByteString data)
13222         throws com.google.protobuf.InvalidProtocolBufferException {
13223       return PARSER.parseFrom(data);
13224     }
13225     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13226         com.google.protobuf.ByteString data,
13227         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13228         throws com.google.protobuf.InvalidProtocolBufferException {
13229       return PARSER.parseFrom(data, extensionRegistry);
13230     }
13231     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data)
13232         throws com.google.protobuf.InvalidProtocolBufferException {
13233       return PARSER.parseFrom(data);
13234     }
13235     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13236         byte[] data,
13237         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13238         throws com.google.protobuf.InvalidProtocolBufferException {
13239       return PARSER.parseFrom(data, extensionRegistry);
13240     }
13241     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input)
13242         throws java.io.IOException {
13243       return PARSER.parseFrom(input);
13244     }
13245     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13246         java.io.InputStream input,
13247         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13248         throws java.io.IOException {
13249       return PARSER.parseFrom(input, extensionRegistry);
13250     }
13251     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input)
13252         throws java.io.IOException {
13253       return PARSER.parseDelimitedFrom(input);
13254     }
13255     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(
13256         java.io.InputStream input,
13257         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13258         throws java.io.IOException {
13259       return PARSER.parseDelimitedFrom(input, extensionRegistry);
13260     }
13261     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13262         com.google.protobuf.CodedInputStream input)
13263         throws java.io.IOException {
13264       return PARSER.parseFrom(input);
13265     }
13266     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13267         com.google.protobuf.CodedInputStream input,
13268         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13269         throws java.io.IOException {
13270       return PARSER.parseFrom(input, extensionRegistry);
13271     }
13272 
13273     public static Builder newBuilder() { return Builder.create(); }
13274     public Builder newBuilderForType() { return newBuilder(); }
13275     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) {
13276       return newBuilder().mergeFrom(prototype);
13277     }
13278     public Builder toBuilder() { return newBuilder(this); }
13279 
13280     @java.lang.Override
13281     protected Builder newBuilderForType(
13282         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13283       Builder builder = new Builder(parent);
13284       return builder;
13285     }
13286     /**
13287      * Protobuf type {@code MutateResponse}
13288      */
13289     public static final class Builder extends
13290         com.google.protobuf.GeneratedMessage.Builder<Builder>
13291        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder {
13292       public static final com.google.protobuf.Descriptors.Descriptor
13293           getDescriptor() {
13294         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
13295       }
13296 
13297       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
13298           internalGetFieldAccessorTable() {
13299         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable
13300             .ensureFieldAccessorsInitialized(
13301                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
13302       }
13303 
13304       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder()
13305       private Builder() {
13306         maybeForceBuilderInitialization();
13307       }
13308 
13309       private Builder(
13310           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13311         super(parent);
13312         maybeForceBuilderInitialization();
13313       }
13314       private void maybeForceBuilderInitialization() {
13315         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13316           getResultFieldBuilder();
13317         }
13318       }
13319       private static Builder create() {
13320         return new Builder();
13321       }
13322 
13323       public Builder clear() {
13324         super.clear();
13325         if (resultBuilder_ == null) {
13326           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13327         } else {
13328           resultBuilder_.clear();
13329         }
13330         bitField0_ = (bitField0_ & ~0x00000001);
13331         processed_ = false;
13332         bitField0_ = (bitField0_ & ~0x00000002);
13333         return this;
13334       }
13335 
13336       public Builder clone() {
13337         return create().mergeFrom(buildPartial());
13338       }
13339 
13340       public com.google.protobuf.Descriptors.Descriptor
13341           getDescriptorForType() {
13342         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
13343       }
13344 
13345       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() {
13346         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
13347       }
13348 
13349       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() {
13350         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial();
13351         if (!result.isInitialized()) {
13352           throw newUninitializedMessageException(result);
13353         }
13354         return result;
13355       }
13356 
        // Copies builder state into a new message, translating builder presence
        // bits into the message's bitField0_.
13357       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() {
13358         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this);
13359         int from_bitField0_ = bitField0_;
13360         int to_bitField0_ = 0;
13361         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13362           to_bitField0_ |= 0x00000001;
13363         }
13364         if (resultBuilder_ == null) {
13365           result.result_ = result_;
13366         } else {
13367           result.result_ = resultBuilder_.build();
13368         }
13369         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
13370           to_bitField0_ |= 0x00000002;
13371         }
13372         result.processed_ = processed_;
13373         result.bitField0_ = to_bitField0_;
13374         onBuilt();
13375         return result;
13376       }
13377 
13378       public Builder mergeFrom(com.google.protobuf.Message other) {
13379         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) {
13380           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other);
13381         } else {
13382           super.mergeFrom(other);
13383           return this;
13384         }
13385       }
13386 
13387       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) {
13388         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this;
13389         if (other.hasResult()) {
13390           mergeResult(other.getResult());
13391         }
13392         if (other.hasProcessed()) {
13393           setProcessed(other.getProcessed());
13394         }
13395         this.mergeUnknownFields(other.getUnknownFields());
13396         return this;
13397       }
13398 
13399       public final boolean isInitialized() {
13400         return true;
13401       }
13402 
        // Stream merge: on parse failure, any partially-parsed message is still
        // merged in (via finally) before the exception propagates.
13403       public Builder mergeFrom(
13404           com.google.protobuf.CodedInputStream input,
13405           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13406           throws java.io.IOException {
13407         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null;
13408         try {
13409           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13410         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13411           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage();
13412           throw e;
13413         } finally {
13414           if (parsedMessage != null) {
13415             mergeFrom(parsedMessage);
13416           }
13417         }
13418         return this;
13419       }
13420       private int bitField0_;
13421 
13422       // optional .Result result = 1;
13423       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13424       private com.google.protobuf.SingleFieldBuilder<
13425           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
13426       /**
13427        * <code>optional .Result result = 1;</code>
13428        */
13429       public boolean hasResult() {
13430         return ((bitField0_ & 0x00000001) == 0x00000001);
13431       }
13432       /**
13433        * <code>optional .Result result = 1;</code>
13434        */
13435       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
13436         if (resultBuilder_ == null) {
13437           return result_;
13438         } else {
13439           return resultBuilder_.getMessage();
13440         }
13441       }
13442       /**
13443        * <code>optional .Result result = 1;</code>
13444        */
13445       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
13446         if (resultBuilder_ == null) {
13447           if (value == null) {
13448             throw new NullPointerException();
13449           }
13450           result_ = value;
13451           onChanged();
13452         } else {
13453           resultBuilder_.setMessage(value);
13454         }
13455         bitField0_ |= 0x00000001;
13456         return this;
13457       }
13458       /**
13459        * <code>optional .Result result = 1;</code>
13460        */
13461       public Builder setResult(
13462           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
13463         if (resultBuilder_ == null) {
13464           result_ = builderForValue.build();
13465           onChanged();
13466         } else {
13467           resultBuilder_.setMessage(builderForValue.build());
13468         }
13469         bitField0_ |= 0x00000001;
13470         return this;
13471       }
13472       /**
13473        * <code>optional .Result result = 1;</code>
13474        */
13475       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
13476         if (resultBuilder_ == null) {
            // If a non-default result is already present, merge into it; else adopt `value`.
13477           if (((bitField0_ & 0x00000001) == 0x00000001) &&
13478               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
13479             result_ =
13480               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
13481           } else {
13482             result_ = value;
13483           }
13484           onChanged();
13485         } else {
13486           resultBuilder_.mergeFrom(value);
13487         }
13488         bitField0_ |= 0x00000001;
13489         return this;
13490       }
13491       /**
13492        * <code>optional .Result result = 1;</code>
13493        */
13494       public Builder clearResult() {
13495         if (resultBuilder_ == null) {
13496           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13497           onChanged();
13498         } else {
13499           resultBuilder_.clear();
13500         }
13501         bitField0_ = (bitField0_ & ~0x00000001);
13502         return this;
13503       }
13504       /**
13505        * <code>optional .Result result = 1;</code>
13506        */
13507       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
13508         bitField0_ |= 0x00000001;
13509         onChanged();
13510         return getResultFieldBuilder().getBuilder();
13511       }
13512       /**
13513        * <code>optional .Result result = 1;</code>
13514        */
13515       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
13516         if (resultBuilder_ != null) {
13517           return resultBuilder_.getMessageOrBuilder();
13518         } else {
13519           return result_;
13520         }
13521       }
13522       /**
13523        * <code>optional .Result result = 1;</code>
13524        */
13525       private com.google.protobuf.SingleFieldBuilder<
13526           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
13527           getResultFieldBuilder() {
13528         if (resultBuilder_ == null) {
            // Lazily create the field builder; ownership moves into it, so the plain
            // result_ reference is nulled afterwards.
13529           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13530               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
13531                   result_,
13532                   getParentForChildren(),
13533                   isClean());
13534           result_ = null;
13535         }
13536         return resultBuilder_;
13537       }
13538 
13539       // optional bool processed = 2;
13540       private boolean processed_ ;
13541       /**
13542        * <code>optional bool processed = 2;</code>
13543        *
13544        * <pre>
13545        * used for mutate to indicate processed only
13546        * </pre>
13547        */
13548       public boolean hasProcessed() {
13549         return ((bitField0_ & 0x00000002) == 0x00000002);
13550       }
13551       /**
13552        * <code>optional bool processed = 2;</code>
13553        *
13554        * <pre>
13555        * used for mutate to indicate processed only
13556        * </pre>
13557        */
13558       public boolean getProcessed() {
13559         return processed_;
13560       }
13561       /**
13562        * <code>optional bool processed = 2;</code>
13563        *
13564        * <pre>
13565        * used for mutate to indicate processed only
13566        * </pre>
13567        */
13568       public Builder setProcessed(boolean value) {
13569         bitField0_ |= 0x00000002;
13570         processed_ = value;
13571         onChanged();
13572         return this;
13573       }
13574       /**
13575        * <code>optional bool processed = 2;</code>
13576        *
13577        * <pre>
13578        * used for mutate to indicate processed only
13579        * </pre>
13580        */
13581       public Builder clearProcessed() {
13582         bitField0_ = (bitField0_ & ~0x00000002);
13583         processed_ = false;
13584         onChanged();
13585         return this;
13586       }
13587 
13588       // @@protoc_insertion_point(builder_scope:MutateResponse)
13589     }
13590 
13591     static {
13592       defaultInstance = new MutateResponse(true);
13593       defaultInstance.initFields();
13594     }
13595 
13596     // @@protoc_insertion_point(class_scope:MutateResponse)
13597   }
13598 
  /**
   * Read-side accessors for the {@code Scan} protobuf message.
   * Implemented by both the immutable {@code Scan} message and its
   * {@code Scan.Builder}, so callers can accept either.
   */
  public interface ScanOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .Column column = 1;
    /**
     * <code>repeated .Column column = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>
        getColumnList();
    /**
     * <code>repeated .Column column = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
    /**
     * <code>repeated .Column column = 1;</code>
     */
    int getColumnCount();
    /**
     * <code>repeated .Column column = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
        getColumnOrBuilderList();
    /**
     * <code>repeated .Column column = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
        int index);

    // repeated .NameBytesPair attribute = 2;
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
        getAttributeList();
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    int getAttributeCount();
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
        getAttributeOrBuilderList();
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index);

    // optional bytes start_row = 3;
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    boolean hasStartRow();
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    com.google.protobuf.ByteString getStartRow();

    // optional bytes stop_row = 4;
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    boolean hasStopRow();
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    com.google.protobuf.ByteString getStopRow();

    // optional .Filter filter = 5;
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    boolean hasFilter();
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();

    // optional .TimeRange time_range = 6;
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    boolean hasTimeRange();
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();

    // optional uint32 max_versions = 7 [default = 1];
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    boolean hasMaxVersions();
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    int getMaxVersions();

    // optional bool cache_blocks = 8 [default = true];
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    boolean hasCacheBlocks();
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    boolean getCacheBlocks();

    // optional uint32 batch_size = 9;
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    boolean hasBatchSize();
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    int getBatchSize();

    // optional uint64 max_result_size = 10;
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    boolean hasMaxResultSize();
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    long getMaxResultSize();

    // optional uint32 store_limit = 11;
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    boolean hasStoreLimit();
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    int getStoreLimit();

    // optional uint32 store_offset = 12;
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    boolean hasStoreOffset();
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    int getStoreOffset();

    // optional bool load_column_families_on_demand = 13;
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand. 
     * </pre>
     */
    boolean hasLoadColumnFamiliesOnDemand();
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand. 
     * </pre>
     */
    boolean getLoadColumnFamiliesOnDemand();

    // optional bool small = 14;
    /**
     * <code>optional bool small = 14;</code>
     */
    boolean hasSmall();
    /**
     * <code>optional bool small = 14;</code>
     */
    boolean getSmall();

    // optional bool reversed = 15 [default = false];
    /**
     * <code>optional bool reversed = 15 [default = false];</code>
     */
    boolean hasReversed();
    /**
     * <code>optional bool reversed = 15 [default = false];</code>
     */
    boolean getReversed();

    // optional .Consistency consistency = 16 [default = STRONG];
    /**
     * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
     */
    boolean hasConsistency();
    /**
     * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();

    // optional uint32 caching = 17;
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    boolean hasCaching();
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    int getCaching();
  }
13818   /**
13819    * Protobuf type {@code Scan}
13820    *
13821    * <pre>
13822    **
13823    * Instead of get from a table, you can scan it with optional filters.
13824    * You can specify the row key range, time range, the columns/families
13825    * to scan and so on.
13826    *
13827    * This scan is used the first time in a scan request. The response of
13828    * the initial scan will return a scanner id, which should be used to
13829    * fetch result batches later on before it is closed.
13830    * </pre>
13831    */
13832   public static final class Scan extends
13833       com.google.protobuf.GeneratedMessage
13834       implements ScanOrBuilder {
    // Use Scan.newBuilder() to construct.
    private Scan(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: builds the singleton default instance with empty unknown fields.
    private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13841 
    // Singleton default instance; assigned in this class's static initializer
    // (same pattern as MutateResponse above).
    private static final Scan defaultInstance;
    public static Scan getDefaultInstance() {
      return defaultInstance;
    }

    public Scan getDefaultInstanceForType() {
      return defaultInstance;
    }
13850 
    // Fields present on the wire but not in this message's schema
    // (preserved so reserialization round-trips them).
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parsing constructor: reads a Scan message from the wire.
     * Each case label is a full wire tag, i.e. (field_number << 3) | wire_type
     * — e.g. 10 = field 1 length-delimited, 26 = field 3 length-delimited,
     * 56 = field 7 varint, 128 = field 16 varint.
     */
    private Scan(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input / end of group.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Repeated fields are collected in a mutable list, tracked by
              // mutable_bitField0_, and frozen in the finally block below.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
                mutable_bitField0_ |= 0x00000001;
              }
              column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
              break;
            }
            case 18: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
                mutable_bitField0_ |= 0x00000002;
              }
              attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
              break;
            }
            case 26: {
              bitField0_ |= 0x00000001;
              startRow_ = input.readBytes();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000002;
              stopRow_ = input.readBytes();
              break;
            }
            case 42: {
              // If filter was already seen, merge the new occurrence into it
              // (last-one-wins per field, merged per protobuf semantics).
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = filter_.toBuilder();
              }
              filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(filter_);
                filter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 50: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = timeRange_.toBuilder();
              }
              timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timeRange_);
                timeRange_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 56: {
              bitField0_ |= 0x00000010;
              maxVersions_ = input.readUInt32();
              break;
            }
            case 64: {
              bitField0_ |= 0x00000020;
              cacheBlocks_ = input.readBool();
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              batchSize_ = input.readUInt32();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000080;
              maxResultSize_ = input.readUInt64();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000100;
              storeLimit_ = input.readUInt32();
              break;
            }
            case 96: {
              bitField0_ |= 0x00000200;
              storeOffset_ = input.readUInt32();
              break;
            }
            case 104: {
              bitField0_ |= 0x00000400;
              loadColumnFamiliesOnDemand_ = input.readBool();
              break;
            }
            case 112: {
              bitField0_ |= 0x00000800;
              small_ = input.readBool();
              break;
            }
            case 120: {
              bitField0_ |= 0x00001000;
              reversed_ = input.readBool();
              break;
            }
            case 128: {
              // Unrecognized enum numbers are preserved as unknown fields
              // rather than dropped, so newer peers' values survive a round-trip.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(16, rawValue);
              } else {
                bitField0_ |= 0x00002000;
                consistency_ = value;
              }
              break;
            }
            case 136: {
              bitField0_ |= 0x00004000;
              caching_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze repeated fields and unknown fields even on error, so the
        // partially-built message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          column_ = java.util.Collections.unmodifiableList(column_);
        }
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          attribute_ = java.util.Collections.unmodifiableList(attribute_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
    }
14022 
    // NOTE(review): PARSER is a public static *mutable* field as emitted by
    // this protoc version; later codegen marks it final. Left as generated —
    // do not hand-edit generated files.
    public static com.google.protobuf.Parser<Scan> PARSER =
        new com.google.protobuf.AbstractParser<Scan>() {
      public Scan parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Scan(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Scan> getParserForType() {
      return PARSER;
    }
14037 
    // Presence bits for the optional fields declared below.
    private int bitField0_;
    // repeated .Column column = 1;
    public static final int COLUMN_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
      return column_;
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
        getColumnOrBuilderList() {
      return column_;
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public int getColumnCount() {
      return column_.size();
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
      return column_.get(index);
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
        int index) {
      return column_.get(index);
    }
14074 
    // repeated .NameBytesPair attribute = 2;
    public static final int ATTRIBUTE_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
        getAttributeOrBuilderList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public int getAttributeCount() {
      return attribute_.size();
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
      return attribute_.get(index);
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index) {
      return attribute_.get(index);
    }
14110 
    // optional bytes start_row = 3;  (presence bit 0x1)
    public static final int START_ROW_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString startRow_;
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    public boolean hasStartRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    public com.google.protobuf.ByteString getStartRow() {
      return startRow_;
    }

    // optional bytes stop_row = 4;  (presence bit 0x2)
    public static final int STOP_ROW_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString stopRow_;
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    public boolean hasStopRow() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    public com.google.protobuf.ByteString getStopRow() {
      return stopRow_;
    }
14142 
    // optional .Filter filter = 5;  (presence bit 0x4)
    public static final int FILTER_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    public boolean hasFilter() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
      return filter_;
    }
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
      return filter_;
    }

    // optional .TimeRange time_range = 6;  (presence bit 0x8)
    public static final int TIME_RANGE_FIELD_NUMBER = 6;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    public boolean hasTimeRange() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
      return timeRange_;
    }
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
      return timeRange_;
    }
14186 
    // optional uint32 max_versions = 7 [default = 1];  (presence bit 0x10)
    public static final int MAX_VERSIONS_FIELD_NUMBER = 7;
    private int maxVersions_;
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    public boolean hasMaxVersions() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    public int getMaxVersions() {
      return maxVersions_;
    }

    // optional bool cache_blocks = 8 [default = true];  (presence bit 0x20)
    public static final int CACHE_BLOCKS_FIELD_NUMBER = 8;
    private boolean cacheBlocks_;
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    public boolean hasCacheBlocks() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    public boolean getCacheBlocks() {
      return cacheBlocks_;
    }

    // optional uint32 batch_size = 9;  (presence bit 0x40)
    public static final int BATCH_SIZE_FIELD_NUMBER = 9;
    private int batchSize_;
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    public boolean hasBatchSize() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    public int getBatchSize() {
      return batchSize_;
    }
14234 
    // optional uint64 max_result_size = 10;  (presence bit 0x80)
    public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10;
    private long maxResultSize_;
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    public boolean hasMaxResultSize() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    public long getMaxResultSize() {
      return maxResultSize_;
    }

    // optional uint32 store_limit = 11;  (presence bit 0x100)
    public static final int STORE_LIMIT_FIELD_NUMBER = 11;
    private int storeLimit_;
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    public boolean hasStoreLimit() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    public int getStoreLimit() {
      return storeLimit_;
    }

    // optional uint32 store_offset = 12;  (presence bit 0x200)
    public static final int STORE_OFFSET_FIELD_NUMBER = 12;
    private int storeOffset_;
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    public boolean hasStoreOffset() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    public int getStoreOffset() {
      return storeOffset_;
    }
14282 
    // optional bool load_column_families_on_demand = 13;  (presence bit 0x400)
    public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13;
    private boolean loadColumnFamiliesOnDemand_;
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand. 
     * </pre>
     */
    public boolean hasLoadColumnFamiliesOnDemand() {
      return ((bitField0_ & 0x00000400) == 0x00000400);
    }
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand. 
     * </pre>
     */
    public boolean getLoadColumnFamiliesOnDemand() {
      return loadColumnFamiliesOnDemand_;
    }

    // optional bool small = 14;  (presence bit 0x800)
    public static final int SMALL_FIELD_NUMBER = 14;
    private boolean small_;
    /**
     * <code>optional bool small = 14;</code>
     */
    public boolean hasSmall() {
      return ((bitField0_ & 0x00000800) == 0x00000800);
    }
    /**
     * <code>optional bool small = 14;</code>
     */
    public boolean getSmall() {
      return small_;
    }

    // optional bool reversed = 15 [default = false];  (presence bit 0x1000)
    public static final int REVERSED_FIELD_NUMBER = 15;
    private boolean reversed_;
    /**
     * <code>optional bool reversed = 15 [default = false];</code>
     */
    public boolean hasReversed() {
      return ((bitField0_ & 0x00001000) == 0x00001000);
    }
    /**
     * <code>optional bool reversed = 15 [default = false];</code>
     */
    public boolean getReversed() {
      return reversed_;
    }
14338 
    // optional .Consistency consistency = 16 [default = STRONG];  (presence bit 0x2000)
    public static final int CONSISTENCY_FIELD_NUMBER = 16;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
    /**
     * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
     */
    public boolean hasConsistency() {
      return ((bitField0_ & 0x00002000) == 0x00002000);
    }
    /**
     * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
      return consistency_;
    }

    // optional uint32 caching = 17;  (presence bit 0x4000)
    public static final int CACHING_FIELD_NUMBER = 17;
    private int caching_;
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    public boolean hasCaching() {
      return ((bitField0_ & 0x00004000) == 0x00004000);
    }
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    public int getCaching() {
      return caching_;
    }
14370 
    // Resets every field to its proto-declared default
    // (note max_versions defaults to 1, cache_blocks to true, consistency to STRONG).
    private void initFields() {
      column_ = java.util.Collections.emptyList();
      attribute_ = java.util.Collections.emptyList();
      startRow_ = com.google.protobuf.ByteString.EMPTY;
      stopRow_ = com.google.protobuf.ByteString.EMPTY;
      filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      maxVersions_ = 1;
      cacheBlocks_ = true;
      batchSize_ = 0;
      maxResultSize_ = 0L;
      storeLimit_ = 0;
      storeOffset_ = 0;
      loadColumnFamiliesOnDemand_ = false;
      small_ = false;
      reversed_ = false;
      consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
      caching_ = 0;
    }
    // Memoized result: -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * A Scan is initialized when every nested column, attribute, and the
     * optional filter message is itself initialized; the result is cached.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getColumnCount(); i++) {
        if (!getColumn(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAttributeCount(); i++) {
        if (!getAttribute(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasFilter()) {
        if (!getFilter().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
14416 
    /**
     * Serializes this message; only fields whose presence bit is set are
     * written. getSerializedSize() is invoked first to populate memoized
     * sizes that nested-message serialization relies on.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < column_.size(); i++) {
        output.writeMessage(1, column_.get(i));
      }
      for (int i = 0; i < attribute_.size(); i++) {
        output.writeMessage(2, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(3, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(4, stopRow_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(5, filter_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(6, timeRange_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt32(7, maxVersions_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBool(8, cacheBlocks_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt32(9, batchSize_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeUInt64(10, maxResultSize_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeUInt32(11, storeLimit_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeUInt32(12, storeOffset_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        output.writeBool(13, loadColumnFamiliesOnDemand_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        output.writeBool(14, small_);
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        output.writeBool(15, reversed_);
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        output.writeEnum(16, consistency_.getNumber());
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        output.writeUInt32(17, caching_);
      }
      // Unknown fields last, so data from newer schema versions round-trips.
      getUnknownFields().writeTo(output);
    }
14473 
    // Cached wire size in bytes; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and memoizes) the serialized size of this message in bytes.
     * The presence checks and field order mirror {@link #writeTo} exactly so
     * the two always agree.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < column_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, column_.get(i));
      }
      for (int i = 0; i < attribute_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, stopRow_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, filter_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, timeRange_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(7, maxVersions_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(8, cacheBlocks_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(9, batchSize_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(10, maxResultSize_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(11, storeLimit_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(12, storeOffset_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(13, loadColumnFamiliesOnDemand_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(14, small_);
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(15, reversed_);
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(16, consistency_.getNumber());
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(17, caching_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
14552 
    private static final long serialVersionUID = 0L;
    /**
     * Java-serialization hook; delegates to the protobuf-aware replacement
     * object supplied by {@code GeneratedMessage} so the message is serialized
     * via its wire format rather than default Java field serialization.
     */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
14559 
    /**
     * Field-by-field equality: two {@code Scan}s are equal when they have the
     * same set of present optional fields, equal values for each present
     * field, equal repeated-field lists (columns, attributes), and equal
     * unknown fields.  Consistent with {@link #hashCode}.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj;

      boolean result = true;
      result = result && getColumnList()
          .equals(other.getColumnList());
      result = result && getAttributeList()
          .equals(other.getAttributeList());
      // For each optional field: presence must match, and values are only
      // compared when the field is set on this message.
      result = result && (hasStartRow() == other.hasStartRow());
      if (hasStartRow()) {
        result = result && getStartRow()
            .equals(other.getStartRow());
      }
      result = result && (hasStopRow() == other.hasStopRow());
      if (hasStopRow()) {
        result = result && getStopRow()
            .equals(other.getStopRow());
      }
      result = result && (hasFilter() == other.hasFilter());
      if (hasFilter()) {
        result = result && getFilter()
            .equals(other.getFilter());
      }
      result = result && (hasTimeRange() == other.hasTimeRange());
      if (hasTimeRange()) {
        result = result && getTimeRange()
            .equals(other.getTimeRange());
      }
      result = result && (hasMaxVersions() == other.hasMaxVersions());
      if (hasMaxVersions()) {
        result = result && (getMaxVersions()
            == other.getMaxVersions());
      }
      result = result && (hasCacheBlocks() == other.hasCacheBlocks());
      if (hasCacheBlocks()) {
        result = result && (getCacheBlocks()
            == other.getCacheBlocks());
      }
      result = result && (hasBatchSize() == other.hasBatchSize());
      if (hasBatchSize()) {
        result = result && (getBatchSize()
            == other.getBatchSize());
      }
      result = result && (hasMaxResultSize() == other.hasMaxResultSize());
      if (hasMaxResultSize()) {
        result = result && (getMaxResultSize()
            == other.getMaxResultSize());
      }
      result = result && (hasStoreLimit() == other.hasStoreLimit());
      if (hasStoreLimit()) {
        result = result && (getStoreLimit()
            == other.getStoreLimit());
      }
      result = result && (hasStoreOffset() == other.hasStoreOffset());
      if (hasStoreOffset()) {
        result = result && (getStoreOffset()
            == other.getStoreOffset());
      }
      result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand());
      if (hasLoadColumnFamiliesOnDemand()) {
        result = result && (getLoadColumnFamiliesOnDemand()
            == other.getLoadColumnFamiliesOnDemand());
      }
      result = result && (hasSmall() == other.hasSmall());
      if (hasSmall()) {
        result = result && (getSmall()
            == other.getSmall());
      }
      result = result && (hasReversed() == other.hasReversed());
      if (hasReversed()) {
        result = result && (getReversed()
            == other.getReversed());
      }
      result = result && (hasConsistency() == other.hasConsistency());
      if (hasConsistency()) {
        result = result &&
            (getConsistency() == other.getConsistency());
      }
      result = result && (hasCaching() == other.hasCaching());
      if (hasCaching()) {
        result = result && (getCaching()
            == other.getCaching());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
14654 
    // Cached hash; 0 is the "not yet computed" sentinel, so a hash that
    // genuinely evaluates to 0 is simply recomputed on each call.
    private int memoizedHashCode = 0;
    /**
     * Hash code consistent with {@link #equals}: mixes the descriptor, then
     * each present field's tag number and value in declaration order, then
     * the unknown fields.  Memoized after the first call.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getColumnCount() > 0) {
        hash = (37 * hash) + COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getColumnList().hashCode();
      }
      if (getAttributeCount() > 0) {
        hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeList().hashCode();
      }
      if (hasStartRow()) {
        hash = (37 * hash) + START_ROW_FIELD_NUMBER;
        hash = (53 * hash) + getStartRow().hashCode();
      }
      if (hasStopRow()) {
        hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
        hash = (53 * hash) + getStopRow().hashCode();
      }
      if (hasFilter()) {
        hash = (37 * hash) + FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getFilter().hashCode();
      }
      if (hasTimeRange()) {
        hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
        hash = (53 * hash) + getTimeRange().hashCode();
      }
      if (hasMaxVersions()) {
        hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getMaxVersions();
      }
      if (hasCacheBlocks()) {
        hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCacheBlocks());
      }
      if (hasBatchSize()) {
        hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getBatchSize();
      }
      if (hasMaxResultSize()) {
        hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getMaxResultSize());
      }
      if (hasStoreLimit()) {
        hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + getStoreLimit();
      }
      if (hasStoreOffset()) {
        hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
        hash = (53 * hash) + getStoreOffset();
      }
      if (hasLoadColumnFamiliesOnDemand()) {
        hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand());
      }
      if (hasSmall()) {
        hash = (37 * hash) + SMALL_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getSmall());
      }
      if (hasReversed()) {
        hash = (37 * hash) + REVERSED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getReversed());
      }
      if (hasConsistency()) {
        hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getConsistency());
      }
      if (hasCaching()) {
        hash = (37 * hash) + CACHING_FIELD_NUMBER;
        hash = (53 * hash) + getCaching();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
14735 
    // Parsing entry points.  Every overload delegates to the singleton PARSER.
    // The ByteString/byte[] variants throw InvalidProtocolBufferException on
    // malformed input; the stream variants may also throw IOException.  The
    // "delimited" variants first read a varint length prefix, allowing several
    // messages to be written back-to-back on one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
14788 
    /** Creates a new, empty {@code Scan.Builder}. */
    public static Builder newBuilder() { return Builder.create(); }
    /** Creates a new builder for this message's type. */
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized from this message instance. */
    public Builder toBuilder() { return newBuilder(this); }

    /** Framework hook: builds a builder attached to {@code parent} for change notification. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
14802     /**
14803      * Protobuf type {@code Scan}
14804      *
14805      * <pre>
14806      **
14807      * Instead of get from a table, you can scan it with optional filters.
14808      * You can specify the row key range, time range, the columns/families
14809      * to scan and so on.
14810      *
14811      * This scan is used the first time in a scan request. The response of
14812      * the initial scan will return a scanner id, which should be used to
14813      * fetch result batches later on before it is closed.
14814      * </pre>
14815      */
14816     public static final class Builder extends
14817         com.google.protobuf.GeneratedMessage.Builder<Builder>
14818        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder {
      /** Returns the protobuf descriptor for the {@code Scan} message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
      }

      /** Supplies the reflection table mapping descriptor fields to accessors. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      /** Constructor used when this builder is nested under a parent builder. */
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the sub-message field builders when the runtime is
      // configured to always use field builders (descriptor-based reflection).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getColumnFieldBuilder();
          getAttributeFieldBuilder();
          getFilterFieldBuilder();
          getTimeRangeFieldBuilder();
        }
      }
      /** Internal factory used by {@code Scan.newBuilder()}. */
      private static Builder create() {
        return new Builder();
      }
14852 
      /**
       * Resets every field to its proto-declared default (note the non-zero
       * defaults: {@code maxVersions = 1}, {@code cacheBlocks = true},
       * {@code consistency = STRONG}) and clears all has-bits in
       * {@code bitField0_}.  Sub-message fields are cleared through their
       * field builders when those exist.
       */
      public Builder clear() {
        super.clear();
        if (columnBuilder_ == null) {
          column_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          columnBuilder_.clear();
        }
        if (attributeBuilder_ == null) {
          attribute_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          attributeBuilder_.clear();
        }
        startRow_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        stopRow_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        if (timeRangeBuilder_ == null) {
          timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
        } else {
          timeRangeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        maxVersions_ = 1;
        bitField0_ = (bitField0_ & ~0x00000040);
        cacheBlocks_ = true;
        bitField0_ = (bitField0_ & ~0x00000080);
        batchSize_ = 0;
        bitField0_ = (bitField0_ & ~0x00000100);
        maxResultSize_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000200);
        storeLimit_ = 0;
        bitField0_ = (bitField0_ & ~0x00000400);
        storeOffset_ = 0;
        bitField0_ = (bitField0_ & ~0x00000800);
        loadColumnFamiliesOnDemand_ = false;
        bitField0_ = (bitField0_ & ~0x00001000);
        small_ = false;
        bitField0_ = (bitField0_ & ~0x00002000);
        reversed_ = false;
        bitField0_ = (bitField0_ & ~0x00004000);
        consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
        bitField0_ = (bitField0_ & ~0x00008000);
        caching_ = 0;
        bitField0_ = (bitField0_ & ~0x00010000);
        return this;
      }
14907 
      /** Deep-copies this builder by round-tripping through a partial message. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      /** Returns the {@code Scan} message descriptor. */
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
      }

      /** Returns the immutable default (all-defaults) {@code Scan} instance. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
      }

      /**
       * Builds the message, throwing {@code UninitializedMessageException}
       * (unchecked) if any required sub-field is missing; use
       * {@code buildPartial()} to skip that check.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
14928 
      /**
       * Builds the message without checking initialization.  The builder's
       * has-bits are remapped to the message's bit layout: the builder uses
       * bits 0-1 for the repeated fields' mutability state, so each optional
       * field's builder bit (0x4 onward) shifts down two positions in the
       * message's {@code bitField0_} (0x1 onward).  Repeated-field lists are
       * frozen via {@code unmodifiableList} on first build so the message is
       * immutable.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (columnBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            column_ = java.util.Collections.unmodifiableList(column_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.column_ = column_;
        } else {
          result.column_ = columnBuilder_.build();
        }
        if (attributeBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            attribute_ = java.util.Collections.unmodifiableList(attribute_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.attribute_ = attribute_;
        } else {
          result.attribute_ = attributeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000001;
        }
        result.startRow_ = startRow_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000002;
        }
        result.stopRow_ = stopRow_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000004;
        }
        if (filterBuilder_ == null) {
          result.filter_ = filter_;
        } else {
          result.filter_ = filterBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000008;
        }
        if (timeRangeBuilder_ == null) {
          result.timeRange_ = timeRange_;
        } else {
          result.timeRange_ = timeRangeBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000010;
        }
        result.maxVersions_ = maxVersions_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000020;
        }
        result.cacheBlocks_ = cacheBlocks_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000040;
        }
        result.batchSize_ = batchSize_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000080;
        }
        result.maxResultSize_ = maxResultSize_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000100;
        }
        result.storeLimit_ = storeLimit_;
        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
          to_bitField0_ |= 0x00000200;
        }
        result.storeOffset_ = storeOffset_;
        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
          to_bitField0_ |= 0x00000400;
        }
        result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_;
        if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
          to_bitField0_ |= 0x00000800;
        }
        result.small_ = small_;
        if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
          to_bitField0_ |= 0x00001000;
        }
        result.reversed_ = reversed_;
        if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
          to_bitField0_ |= 0x00002000;
        }
        result.consistency_ = consistency_;
        if (((from_bitField0_ & 0x00010000) == 0x00010000)) {
          to_bitField0_ |= 0x00004000;
        }
        result.caching_ = caching_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
15023 
15024       public Builder mergeFrom(com.google.protobuf.Message other) {
15025         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) {
15026           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other);
15027         } else {
15028           super.mergeFrom(other);
15029           return this;
15030         }
15031       }
15032 
      /**
       * Merges another {@code Scan} into this builder: repeated fields are
       * appended, singular set fields overwrite, and sub-messages (filter,
       * timeRange) are merged recursively.  For repeated fields, when this
       * builder's list is still empty the other message's immutable list is
       * adopted by reference as an optimization; {@code ensureColumnIsMutable}
       * / {@code ensureAttributeIsMutable} copy it before any later mutation.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this;
        if (columnBuilder_ == null) {
          if (!other.column_.isEmpty()) {
            if (column_.isEmpty()) {
              column_ = other.column_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureColumnIsMutable();
              column_.addAll(other.column_);
            }
            onChanged();
          }
        } else {
          if (!other.column_.isEmpty()) {
            if (columnBuilder_.isEmpty()) {
              // Our builder is empty: discard it and adopt the other list
              // directly, recreating the builder only if the runtime insists.
              columnBuilder_.dispose();
              columnBuilder_ = null;
              column_ = other.column_;
              bitField0_ = (bitField0_ & ~0x00000001);
              columnBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getColumnFieldBuilder() : null;
            } else {
              columnBuilder_.addAllMessages(other.column_);
            }
          }
        }
        if (attributeBuilder_ == null) {
          if (!other.attribute_.isEmpty()) {
            if (attribute_.isEmpty()) {
              attribute_ = other.attribute_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureAttributeIsMutable();
              attribute_.addAll(other.attribute_);
            }
            onChanged();
          }
        } else {
          if (!other.attribute_.isEmpty()) {
            if (attributeBuilder_.isEmpty()) {
              attributeBuilder_.dispose();
              attributeBuilder_ = null;
              attribute_ = other.attribute_;
              bitField0_ = (bitField0_ & ~0x00000002);
              attributeBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getAttributeFieldBuilder() : null;
            } else {
              attributeBuilder_.addAllMessages(other.attribute_);
            }
          }
        }
        // Singular fields: copy only when set on the other message.
        if (other.hasStartRow()) {
          setStartRow(other.getStartRow());
        }
        if (other.hasStopRow()) {
          setStopRow(other.getStopRow());
        }
        if (other.hasFilter()) {
          mergeFilter(other.getFilter());
        }
        if (other.hasTimeRange()) {
          mergeTimeRange(other.getTimeRange());
        }
        if (other.hasMaxVersions()) {
          setMaxVersions(other.getMaxVersions());
        }
        if (other.hasCacheBlocks()) {
          setCacheBlocks(other.getCacheBlocks());
        }
        if (other.hasBatchSize()) {
          setBatchSize(other.getBatchSize());
        }
        if (other.hasMaxResultSize()) {
          setMaxResultSize(other.getMaxResultSize());
        }
        if (other.hasStoreLimit()) {
          setStoreLimit(other.getStoreLimit());
        }
        if (other.hasStoreOffset()) {
          setStoreOffset(other.getStoreOffset());
        }
        if (other.hasLoadColumnFamiliesOnDemand()) {
          setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand());
        }
        if (other.hasSmall()) {
          setSmall(other.getSmall());
        }
        if (other.hasReversed()) {
          setReversed(other.getReversed());
        }
        if (other.hasConsistency()) {
          setConsistency(other.getConsistency());
        }
        if (other.hasCaching()) {
          setCaching(other.getCaching());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
15135 
15136       public final boolean isInitialized() {
15137         for (int i = 0; i < getColumnCount(); i++) {
15138           if (!getColumn(i).isInitialized()) {
15139             
15140             return false;
15141           }
15142         }
15143         for (int i = 0; i < getAttributeCount(); i++) {
15144           if (!getAttribute(i).isInitialized()) {
15145             
15146             return false;
15147           }
15148         }
15149         if (hasFilter()) {
15150           if (!getFilter().isInitialized()) {
15151             
15152             return false;
15153           }
15154         }
15155         return true;
15156       }
15157 
      /**
       * Parses a {@code Scan} from the stream via {@code PARSER} and merges it
       * into this builder.  On a parse failure the exception's partially
       * parsed message is still merged (in the {@code finally} block) before
       * the exception is rethrown, preserving whatever fields were read.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-local bit set: bit 0x1 records that column_ has been copied
      // into a private mutable list, bit 0x2 the same for attribute_, and the
      // higher bits track which optional fields are currently set.
      private int bitField0_;

      // repeated .Column column = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: column_ may start as the shared immutable empty
      // list (or a list taken from a merged message), so it is replaced with a
      // private ArrayList before the first in-place mutation.
      private void ensureColumnIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
          bitField0_ |= 0x00000001;
         }
      }

      // Lazily-created delegate that manages per-element sub-builders.  Once
      // created (see getColumnFieldBuilder()) it owns the field and column_ is
      // nulled out; every accessor below branches on whether it exists yet.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;

      /**
       * <code>repeated .Column column = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
        if (columnBuilder_ == null) {
          // wrap so callers cannot mutate the builder's backing list
          return java.util.Collections.unmodifiableList(column_);
        } else {
          return columnBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public int getColumnCount() {
        if (columnBuilder_ == null) {
          return column_.size();
        } else {
          return columnBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
        if (columnBuilder_ == null) {
          return column_.get(index);
        } else {
          return columnBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder setColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
        if (columnBuilder_ == null) {
          // protobuf fields are null-hostile; reject before mutating
          if (value == null) {
            throw new NullPointerException();
          }
          ensureColumnIsMutable();
          column_.set(index, value);
          onChanged();
        } else {
          columnBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder setColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
        if (columnBuilder_ == null) {
          ensureColumnIsMutable();
          column_.set(index, builderForValue.build());
          onChanged();
        } else {
          columnBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
        if (columnBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureColumnIsMutable();
          column_.add(value);
          onChanged();
        } else {
          columnBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder addColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
        if (columnBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureColumnIsMutable();
          column_.add(index, value);
          onChanged();
        } else {
          columnBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder addColumn(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
        if (columnBuilder_ == null) {
          ensureColumnIsMutable();
          column_.add(builderForValue.build());
          onChanged();
        } else {
          columnBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder addColumn(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
        if (columnBuilder_ == null) {
          ensureColumnIsMutable();
          column_.add(index, builderForValue.build());
          onChanged();
        } else {
          columnBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder addAllColumn(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
        if (columnBuilder_ == null) {
          ensureColumnIsMutable();
          // GeneratedMessage.Builder.addAll: bulk-append with null checks
          super.addAll(values, column_);
          onChanged();
        } else {
          columnBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder clearColumn() {
        if (columnBuilder_ == null) {
          column_ = java.util.Collections.emptyList();
          // drop the "mutable copy made" bit along with the contents
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          columnBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public Builder removeColumn(int index) {
        if (columnBuilder_ == null) {
          ensureColumnIsMutable();
          column_.remove(index);
          onChanged();
        } else {
          columnBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
          int index) {
        // forces creation of the field builder (switches to builder mode)
        return getColumnFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
          int index) {
        if (columnBuilder_ == null) {
          return column_.get(index);  } else {
          return columnBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
           getColumnOrBuilderList() {
        if (columnBuilder_ != null) {
          return columnBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(column_);
        }
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
        return getColumnFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
          int index) {
        return getColumnFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
      }
      /**
       * <code>repeated .Column column = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder> 
           getColumnBuilderList() {
        return getColumnFieldBuilder().getBuilderList();
      }
      // Creates the RepeatedFieldBuilder on first use and hands it the current
      // list; from then on the builder owns the field, so column_ is nulled.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
          getColumnFieldBuilder() {
        if (columnBuilder_ == null) {
          columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
                  column_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          column_ = null;
        }
        return columnBuilder_;
      }
15416 
      // repeated .NameBytesPair attribute = 2;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard (bit 0x2): replace the possibly-shared list with a
      // private ArrayList before the first in-place mutation.
      private void ensureAttributeIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
          bitField0_ |= 0x00000002;
         }
      }

      // Lazily-created sub-builder delegate for this repeated field; once it
      // exists it owns the field and attribute_ is nulled out.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;

      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
        if (attributeBuilder_ == null) {
          // wrap so callers cannot mutate the builder's backing list
          return java.util.Collections.unmodifiableList(attribute_);
        } else {
          return attributeBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public int getAttributeCount() {
        if (attributeBuilder_ == null) {
          return attribute_.size();
        } else {
          return attributeBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
        if (attributeBuilder_ == null) {
          return attribute_.get(index);
        } else {
          return attributeBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder setAttribute(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (attributeBuilder_ == null) {
          // protobuf fields are null-hostile; reject before mutating
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributeIsMutable();
          attribute_.set(index, value);
          onChanged();
        } else {
          attributeBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder setAttribute(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (attributeBuilder_ == null) {
          ensureAttributeIsMutable();
          attribute_.set(index, builderForValue.build());
          onChanged();
        } else {
          attributeBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (attributeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributeIsMutable();
          attribute_.add(value);
          onChanged();
        } else {
          attributeBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder addAttribute(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (attributeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributeIsMutable();
          attribute_.add(index, value);
          onChanged();
        } else {
          attributeBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder addAttribute(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (attributeBuilder_ == null) {
          ensureAttributeIsMutable();
          attribute_.add(builderForValue.build());
          onChanged();
        } else {
          attributeBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder addAttribute(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (attributeBuilder_ == null) {
          ensureAttributeIsMutable();
          attribute_.add(index, builderForValue.build());
          onChanged();
        } else {
          attributeBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder addAllAttribute(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
        if (attributeBuilder_ == null) {
          ensureAttributeIsMutable();
          // GeneratedMessage.Builder.addAll: bulk-append with null checks
          super.addAll(values, attribute_);
          onChanged();
        } else {
          attributeBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder clearAttribute() {
        if (attributeBuilder_ == null) {
          attribute_ = java.util.Collections.emptyList();
          // drop the "mutable copy made" bit along with the contents
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          attributeBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public Builder removeAttribute(int index) {
        if (attributeBuilder_ == null) {
          ensureAttributeIsMutable();
          attribute_.remove(index);
          onChanged();
        } else {
          attributeBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
          int index) {
        // forces creation of the field builder (switches to builder mode)
        return getAttributeFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
          int index) {
        if (attributeBuilder_ == null) {
          return attribute_.get(index);  } else {
          return attributeBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
           getAttributeOrBuilderList() {
        if (attributeBuilder_ != null) {
          return attributeBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(attribute_);
        }
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
        return getAttributeFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
          int index) {
        return getAttributeFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
      }
      /**
       * <code>repeated .NameBytesPair attribute = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> 
           getAttributeBuilderList() {
        return getAttributeFieldBuilder().getBuilderList();
      }
      // Creates the RepeatedFieldBuilder on first use and hands it the current
      // list; from then on the builder owns the field, so attribute_ is nulled.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
          getAttributeFieldBuilder() {
        if (attributeBuilder_ == null) {
          attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
                  attribute_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          attribute_ = null;
        }
        return attributeBuilder_;
      }
15656 
      // optional bytes start_row = 3;
      // Presence tracked by bit 0x4 of bitField0_; default is the empty
      // ByteString, never null.
      private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes start_row = 3;</code>
       */
      public boolean hasStartRow() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes start_row = 3;</code>
       */
      public com.google.protobuf.ByteString getStartRow() {
        return startRow_;
      }
      /**
       * <code>optional bytes start_row = 3;</code>
       */
      public Builder setStartRow(com.google.protobuf.ByteString value) {
        // bytes fields reject null; clear the field instead of setting null
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        startRow_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes start_row = 3;</code>
       */
      public Builder clearStartRow() {
        bitField0_ = (bitField0_ & ~0x00000004);
        // restore the message's default value for the field
        startRow_ = getDefaultInstance().getStartRow();
        onChanged();
        return this;
      }
15692 
      // optional bytes stop_row = 4;
      // Presence tracked by bit 0x8 of bitField0_; default is the empty
      // ByteString, never null.
      private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes stop_row = 4;</code>
       */
      public boolean hasStopRow() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional bytes stop_row = 4;</code>
       */
      public com.google.protobuf.ByteString getStopRow() {
        return stopRow_;
      }
      /**
       * <code>optional bytes stop_row = 4;</code>
       */
      public Builder setStopRow(com.google.protobuf.ByteString value) {
        // bytes fields reject null; clear the field instead of setting null
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        stopRow_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes stop_row = 4;</code>
       */
      public Builder clearStopRow() {
        bitField0_ = (bitField0_ & ~0x00000008);
        // restore the message's default value for the field
        stopRow_ = getDefaultInstance().getStopRow();
        onChanged();
        return this;
      }
15728 
      // optional .Filter filter = 5;
      // Presence tracked by bit 0x10.  Either filter_ holds the value, or
      // (once getFilterBuilder() has been called) filterBuilder_ owns it and
      // filter_ is nulled out.
      private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public boolean hasFilter() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
        if (filterBuilder_ == null) {
          return filter_;
        } else {
          return filterBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          filter_ = value;
          onChanged();
        } else {
          filterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public Builder setFilter(
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
        if (filterBuilder_ == null) {
          filter_ = builderForValue.build();
          onChanged();
        } else {
          filterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       *
       * Merges {@code value} field-by-field into an already-set filter;
       * if the field is unset (or still the default instance) the value
       * simply replaces it.
       */
      public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
            filter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
          } else {
            filter_ = value;
          }
          onChanged();
        } else {
          filterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public Builder clearFilter() {
        if (filterBuilder_ == null) {
          filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
          onChanged();
        } else {
          filterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
        // handing out a mutable builder implies the field is now "set"
        bitField0_ |= 0x00000010;
        onChanged();
        return getFilterFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
        if (filterBuilder_ != null) {
          return filterBuilder_.getMessageOrBuilder();
        } else {
          return filter_;
        }
      }
      /**
       * <code>optional .Filter filter = 5;</code>
       *
       * Lazily creates the SingleFieldBuilder; afterwards it owns the field
       * and filter_ is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> 
          getFilterFieldBuilder() {
        if (filterBuilder_ == null) {
          filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
                  filter_,
                  getParentForChildren(),
                  isClean());
          filter_ = null;
        }
        return filterBuilder_;
      }
15845 
      // optional .TimeRange time_range = 6;
      // Presence tracked by bit 0x20.  Either timeRange_ holds the value, or
      // (once getTimeRangeBuilder() has been called) timeRangeBuilder_ owns it
      // and timeRange_ is nulled out.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       */
      public boolean hasTimeRange() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
        if (timeRangeBuilder_ == null) {
          return timeRange_;
        } else {
          return timeRangeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       */
      public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
        if (timeRangeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          timeRange_ = value;
          onChanged();
        } else {
          timeRangeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       */
      public Builder setTimeRange(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
        if (timeRangeBuilder_ == null) {
          timeRange_ = builderForValue.build();
          onChanged();
        } else {
          timeRangeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       *
       * Merges {@code value} field-by-field into an already-set time range;
       * if the field is unset (or still the default instance) the value
       * simply replaces it.
       */
      public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
        if (timeRangeBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
            timeRange_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
          } else {
            timeRange_ = value;
          }
          onChanged();
        } else {
          timeRangeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       */
      public Builder clearTimeRange() {
        if (timeRangeBuilder_ == null) {
          timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
          onChanged();
        } else {
          timeRangeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * <code>optional .TimeRange time_range = 6;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
        // handing out a mutable builder implies the field is now "set"
        bitField0_ |= 0x00000020;
        onChanged();
        return getTimeRangeFieldBuilder().getBuilder();
      }
15936       /**
15937        * <code>optional .TimeRange time_range = 6;</code>
15938        */
15939       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
15940         if (timeRangeBuilder_ != null) {
15941           return timeRangeBuilder_.getMessageOrBuilder();
15942         } else {
15943           return timeRange_;
15944         }
15945       }
15946       /**
15947        * <code>optional .TimeRange time_range = 6;</code>
15948        */
15949       private com.google.protobuf.SingleFieldBuilder<
15950           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> 
15951           getTimeRangeFieldBuilder() {
15952         if (timeRangeBuilder_ == null) {
15953           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
15954               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
15955                   timeRange_,
15956                   getParentForChildren(),
15957                   isClean());
15958           timeRange_ = null;
15959         }
15960         return timeRangeBuilder_;
15961       }
15962 
      // optional uint32 max_versions = 7 [default = 1];
      private int maxVersions_ = 1;
      /**
       * <code>optional uint32 max_versions = 7 [default = 1];</code>
       */
      public boolean hasMaxVersions() {
        // Bit 0x40 of bitField0_ marks field 7 (max_versions) as set.
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional uint32 max_versions = 7 [default = 1];</code>
       */
      public int getMaxVersions() {
        return maxVersions_;
      }
      /**
       * <code>optional uint32 max_versions = 7 [default = 1];</code>
       */
      public Builder setMaxVersions(int value) {
        bitField0_ |= 0x00000040;
        maxVersions_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 max_versions = 7 [default = 1];</code>
       */
      public Builder clearMaxVersions() {
        bitField0_ = (bitField0_ & ~0x00000040);
        // Restore the proto-declared default of 1.
        maxVersions_ = 1;
        onChanged();
        return this;
      }
15995 
      // optional bool cache_blocks = 8 [default = true];
      private boolean cacheBlocks_ = true;
      /**
       * <code>optional bool cache_blocks = 8 [default = true];</code>
       */
      public boolean hasCacheBlocks() {
        // Bit 0x80 of bitField0_ marks field 8 (cache_blocks) as set.
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional bool cache_blocks = 8 [default = true];</code>
       */
      public boolean getCacheBlocks() {
        return cacheBlocks_;
      }
      /**
       * <code>optional bool cache_blocks = 8 [default = true];</code>
       */
      public Builder setCacheBlocks(boolean value) {
        bitField0_ |= 0x00000080;
        cacheBlocks_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool cache_blocks = 8 [default = true];</code>
       */
      public Builder clearCacheBlocks() {
        bitField0_ = (bitField0_ & ~0x00000080);
        // Restore the proto-declared default of true.
        cacheBlocks_ = true;
        onChanged();
        return this;
      }
16028 
      // optional uint32 batch_size = 9;
      private int batchSize_ ;
      /**
       * <code>optional uint32 batch_size = 9;</code>
       */
      public boolean hasBatchSize() {
        // Bit 0x100 of bitField0_ marks field 9 (batch_size) as set.
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional uint32 batch_size = 9;</code>
       */
      public int getBatchSize() {
        return batchSize_;
      }
      /**
       * <code>optional uint32 batch_size = 9;</code>
       */
      public Builder setBatchSize(int value) {
        bitField0_ |= 0x00000100;
        batchSize_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 batch_size = 9;</code>
       */
      public Builder clearBatchSize() {
        bitField0_ = (bitField0_ & ~0x00000100);
        batchSize_ = 0;
        onChanged();
        return this;
      }
16061 
      // optional uint64 max_result_size = 10;
      private long maxResultSize_ ;
      /**
       * <code>optional uint64 max_result_size = 10;</code>
       */
      public boolean hasMaxResultSize() {
        // Bit 0x200 of bitField0_ marks field 10 (max_result_size) as set.
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      /**
       * <code>optional uint64 max_result_size = 10;</code>
       */
      public long getMaxResultSize() {
        return maxResultSize_;
      }
      /**
       * <code>optional uint64 max_result_size = 10;</code>
       */
      public Builder setMaxResultSize(long value) {
        bitField0_ |= 0x00000200;
        maxResultSize_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 max_result_size = 10;</code>
       */
      public Builder clearMaxResultSize() {
        bitField0_ = (bitField0_ & ~0x00000200);
        maxResultSize_ = 0L;
        onChanged();
        return this;
      }
16094 
      // optional uint32 store_limit = 11;
      private int storeLimit_ ;
      /**
       * <code>optional uint32 store_limit = 11;</code>
       */
      public boolean hasStoreLimit() {
        // Bit 0x400 of bitField0_ marks field 11 (store_limit) as set.
        return ((bitField0_ & 0x00000400) == 0x00000400);
      }
      /**
       * <code>optional uint32 store_limit = 11;</code>
       */
      public int getStoreLimit() {
        return storeLimit_;
      }
      /**
       * <code>optional uint32 store_limit = 11;</code>
       */
      public Builder setStoreLimit(int value) {
        bitField0_ |= 0x00000400;
        storeLimit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 store_limit = 11;</code>
       */
      public Builder clearStoreLimit() {
        bitField0_ = (bitField0_ & ~0x00000400);
        storeLimit_ = 0;
        onChanged();
        return this;
      }
16127 
      // optional uint32 store_offset = 12;
      private int storeOffset_ ;
      /**
       * <code>optional uint32 store_offset = 12;</code>
       */
      public boolean hasStoreOffset() {
        // Bit 0x800 of bitField0_ marks field 12 (store_offset) as set.
        return ((bitField0_ & 0x00000800) == 0x00000800);
      }
      /**
       * <code>optional uint32 store_offset = 12;</code>
       */
      public int getStoreOffset() {
        return storeOffset_;
      }
      /**
       * <code>optional uint32 store_offset = 12;</code>
       */
      public Builder setStoreOffset(int value) {
        bitField0_ |= 0x00000800;
        storeOffset_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 store_offset = 12;</code>
       */
      public Builder clearStoreOffset() {
        bitField0_ = (bitField0_ & ~0x00000800);
        storeOffset_ = 0;
        onChanged();
        return this;
      }
16160 
      // optional bool load_column_families_on_demand = 13;
      private boolean loadColumnFamiliesOnDemand_ ;
      /**
       * <code>optional bool load_column_families_on_demand = 13;</code>
       *
       * <pre>
       * DO NOT add defaults to load_column_families_on_demand. 
       * </pre>
       */
      public boolean hasLoadColumnFamiliesOnDemand() {
        // Bit 0x1000 of bitField0_ marks field 13 as set.
        return ((bitField0_ & 0x00001000) == 0x00001000);
      }
      /**
       * <code>optional bool load_column_families_on_demand = 13;</code>
       *
       * <pre>
       * DO NOT add defaults to load_column_families_on_demand. 
       * </pre>
       */
      public boolean getLoadColumnFamiliesOnDemand() {
        return loadColumnFamiliesOnDemand_;
      }
      /**
       * <code>optional bool load_column_families_on_demand = 13;</code>
       *
       * <pre>
       * DO NOT add defaults to load_column_families_on_demand. 
       * </pre>
       */
      public Builder setLoadColumnFamiliesOnDemand(boolean value) {
        bitField0_ |= 0x00001000;
        loadColumnFamiliesOnDemand_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool load_column_families_on_demand = 13;</code>
       *
       * <pre>
       * DO NOT add defaults to load_column_families_on_demand. 
       * </pre>
       */
      public Builder clearLoadColumnFamiliesOnDemand() {
        bitField0_ = (bitField0_ & ~0x00001000);
        loadColumnFamiliesOnDemand_ = false;
        onChanged();
        return this;
      }
16209 
      // optional bool small = 14;
      private boolean small_ ;
      /**
       * <code>optional bool small = 14;</code>
       */
      public boolean hasSmall() {
        // Bit 0x2000 of bitField0_ marks field 14 (small) as set.
        return ((bitField0_ & 0x00002000) == 0x00002000);
      }
      /**
       * <code>optional bool small = 14;</code>
       */
      public boolean getSmall() {
        return small_;
      }
      /**
       * <code>optional bool small = 14;</code>
       */
      public Builder setSmall(boolean value) {
        bitField0_ |= 0x00002000;
        small_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool small = 14;</code>
       */
      public Builder clearSmall() {
        bitField0_ = (bitField0_ & ~0x00002000);
        small_ = false;
        onChanged();
        return this;
      }
16242 
      // optional bool reversed = 15 [default = false];
      private boolean reversed_ ;
      /**
       * <code>optional bool reversed = 15 [default = false];</code>
       */
      public boolean hasReversed() {
        // Bit 0x4000 of bitField0_ marks field 15 (reversed) as set.
        return ((bitField0_ & 0x00004000) == 0x00004000);
      }
      /**
       * <code>optional bool reversed = 15 [default = false];</code>
       */
      public boolean getReversed() {
        return reversed_;
      }
      /**
       * <code>optional bool reversed = 15 [default = false];</code>
       */
      public Builder setReversed(boolean value) {
        bitField0_ |= 0x00004000;
        reversed_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool reversed = 15 [default = false];</code>
       */
      public Builder clearReversed() {
        bitField0_ = (bitField0_ & ~0x00004000);
        reversed_ = false;
        onChanged();
        return this;
      }
16275 
      // optional .Consistency consistency = 16 [default = STRONG];
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
      /**
       * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
       */
      public boolean hasConsistency() {
        // Bit 0x8000 of bitField0_ marks field 16 (consistency) as set.
        return ((bitField0_ & 0x00008000) == 0x00008000);
      }
      /**
       * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
        return consistency_;
      }
      /**
       * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
       */
      public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
        // Enum fields are null-hostile: null would corrupt serialization.
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00008000;
        consistency_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
       */
      public Builder clearConsistency() {
        bitField0_ = (bitField0_ & ~0x00008000);
        // Restore the proto-declared default of STRONG.
        consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
        onChanged();
        return this;
      }
16311 
      // optional uint32 caching = 17;
      private int caching_ ;
      /**
       * <code>optional uint32 caching = 17;</code>
       */
      public boolean hasCaching() {
        // Bit 0x10000 of bitField0_ marks field 17 (caching) as set.
        return ((bitField0_ & 0x00010000) == 0x00010000);
      }
      /**
       * <code>optional uint32 caching = 17;</code>
       */
      public int getCaching() {
        return caching_;
      }
      /**
       * <code>optional uint32 caching = 17;</code>
       */
      public Builder setCaching(int value) {
        bitField0_ |= 0x00010000;
        caching_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 caching = 17;</code>
       */
      public Builder clearCaching() {
        bitField0_ = (bitField0_ & ~0x00010000);
        caching_ = 0;
        onChanged();
        return this;
      }
16344 
16345       // @@protoc_insertion_point(builder_scope:Scan)
16346     }
16347 
    static {
      // Eagerly build the singleton default Scan instance returned by
      // Scan.getDefaultInstance(); the noInit ctor skips stream parsing.
      defaultInstance = new Scan(true);
      defaultInstance.initFields();
    }
16352 
16353     // @@protoc_insertion_point(class_scope:Scan)
16354   }
16355 
  /**
   * Accessor interface for the {@code ScanRequest} protobuf message,
   * implemented by both the immutable message and its Builder.
   */
  public interface ScanRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .RegionSpecifier region = 1;
    /**
     * <code>optional .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>optional .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>optional .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional .Scan scan = 2;
    /**
     * <code>optional .Scan scan = 2;</code>
     */
    boolean hasScan();
    /**
     * <code>optional .Scan scan = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
    /**
     * <code>optional .Scan scan = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();

    // optional uint64 scanner_id = 3;
    /**
     * <code>optional uint64 scanner_id = 3;</code>
     */
    boolean hasScannerId();
    /**
     * <code>optional uint64 scanner_id = 3;</code>
     */
    long getScannerId();

    // optional uint32 number_of_rows = 4;
    /**
     * <code>optional uint32 number_of_rows = 4;</code>
     */
    boolean hasNumberOfRows();
    /**
     * <code>optional uint32 number_of_rows = 4;</code>
     */
    int getNumberOfRows();

    // optional bool close_scanner = 5;
    /**
     * <code>optional bool close_scanner = 5;</code>
     */
    boolean hasCloseScanner();
    /**
     * <code>optional bool close_scanner = 5;</code>
     */
    boolean getCloseScanner();

    // optional uint64 next_call_seq = 6;
    /**
     * <code>optional uint64 next_call_seq = 6;</code>
     */
    boolean hasNextCallSeq();
    /**
     * <code>optional uint64 next_call_seq = 6;</code>
     */
    long getNextCallSeq();

    // optional bool client_handles_partials = 7;
    /**
     * <code>optional bool client_handles_partials = 7;</code>
     */
    boolean hasClientHandlesPartials();
    /**
     * <code>optional bool client_handles_partials = 7;</code>
     */
    boolean getClientHandlesPartials();

    // optional bool client_handles_heartbeats = 8;
    /**
     * <code>optional bool client_handles_heartbeats = 8;</code>
     */
    boolean hasClientHandlesHeartbeats();
    /**
     * <code>optional bool client_handles_heartbeats = 8;</code>
     */
    boolean getClientHandlesHeartbeats();
  }
16447   /**
16448    * Protobuf type {@code ScanRequest}
16449    *
16450    * <pre>
16451    **
16452    * A scan request. Initially, it should specify a scan. Later on, you
16453    * can use the scanner id returned to fetch result batches with a different
16454    * scan request.
16455    *
16456    * The scanner will remain open if there are more results, and it's not
16457    * asked to be closed explicitly.
16458    *
16459    * You can fetch the results and ask the scanner to be closed to save
16460    * a trip if you are not interested in remaining results.
16461    * </pre>
16462    */
16463   public static final class ScanRequest extends
16464       com.google.protobuf.GeneratedMessage
16465       implements ScanRequestOrBuilder {
    // Use ScanRequest.newBuilder() to construct.
    private ScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only for the static defaultInstance; installs an
    // empty unknown-field set and performs no other initialization here.
    private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16472 
    // Singleton all-fields-unset instance, created in the class's static
    // initializer (outside this view).
    private static final ScanRequest defaultInstance;
    public static ScanRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ScanRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
16481 
    // Tags not recognized by this schema version are preserved here so they
    // round-trip through reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: consumes the entire stream, populating fields and
    // recording unrecognized tags into unknownFields.  Invoked via PARSER.
    private ScanRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            // Note: the default arm precedes the field cases; this is
            // behaviorally irrelevant in Java since every arm ends in break.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (region), wire type 2.  If the field was already
              // seen, merge the repeated occurrence per proto2 semantics.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (scan), wire type 2; same merge-on-repeat handling.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = scan_.toBuilder();
              }
              scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(scan_);
                scan_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              scannerId_ = input.readUInt64();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              numberOfRows_ = input.readUInt32();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000010;
              closeScanner_ = input.readBool();
              break;
            }
            case 48: {
              bitField0_ |= 0x00000020;
              nextCallSeq_ = input.readUInt64();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              clientHandlesPartials_ = input.readBool();
              break;
            }
            case 64: {
              bitField0_ |= 0x00000080;
              clientHandlesHeartbeats_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields/extensions, even on parse failure,
        // so the partially built message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and accessor table wired up in the
    // outer ClientProtos class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
    }
16590 
    // Stateless parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<ScanRequest> PARSER =
        new com.google.protobuf.AbstractParser<ScanRequest>() {
      public ScanRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ScanRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ScanRequest> getParserForType() {
      return PARSER;
    }
16605 
    // One presence bit per optional field, assigned in field-number order.
    private int bitField0_;
    // optional .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>optional .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>optional .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // optional .Scan scan = 2;
    public static final int SCAN_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
    /**
     * <code>optional .Scan scan = 2;</code>
     */
    public boolean hasScan() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .Scan scan = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
      return scan_;
    }
    /**
     * <code>optional .Scan scan = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
      return scan_;
    }

    // optional uint64 scanner_id = 3;
    public static final int SCANNER_ID_FIELD_NUMBER = 3;
    private long scannerId_;
    /**
     * <code>optional uint64 scanner_id = 3;</code>
     */
    public boolean hasScannerId() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 scanner_id = 3;</code>
     */
    public long getScannerId() {
      return scannerId_;
    }

    // optional uint32 number_of_rows = 4;
    public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4;
    private int numberOfRows_;
    /**
     * <code>optional uint32 number_of_rows = 4;</code>
     */
    public boolean hasNumberOfRows() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint32 number_of_rows = 4;</code>
     */
    public int getNumberOfRows() {
      return numberOfRows_;
    }

    // optional bool close_scanner = 5;
    public static final int CLOSE_SCANNER_FIELD_NUMBER = 5;
    private boolean closeScanner_;
    /**
     * <code>optional bool close_scanner = 5;</code>
     */
    public boolean hasCloseScanner() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bool close_scanner = 5;</code>
     */
    public boolean getCloseScanner() {
      return closeScanner_;
    }

    // optional uint64 next_call_seq = 6;
    public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6;
    private long nextCallSeq_;
    /**
     * <code>optional uint64 next_call_seq = 6;</code>
     */
    public boolean hasNextCallSeq() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional uint64 next_call_seq = 6;</code>
     */
    public long getNextCallSeq() {
      return nextCallSeq_;
    }

    // optional bool client_handles_partials = 7;
    public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7;
    private boolean clientHandlesPartials_;
    /**
     * <code>optional bool client_handles_partials = 7;</code>
     */
    public boolean hasClientHandlesPartials() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional bool client_handles_partials = 7;</code>
     */
    public boolean getClientHandlesPartials() {
      return clientHandlesPartials_;
    }

    // optional bool client_handles_heartbeats = 8;
    public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8;
    private boolean clientHandlesHeartbeats_;
    /**
     * <code>optional bool client_handles_heartbeats = 8;</code>
     */
    public boolean hasClientHandlesHeartbeats() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional bool client_handles_heartbeats = 8;</code>
     */
    public boolean getClientHandlesHeartbeats() {
      return clientHandlesHeartbeats_;
    }
16746 
          // Resets every field to its protobuf default: sub-messages to their
          // shared default instances, numerics to 0, bools to false. Note it does
          // NOT clear bitField0_ (presence bits). Presumably invoked from the
          // generated constructors, which are outside this chunk — TODO confirm.
16747     private void initFields() {
16748       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
16749       scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
16750       scannerId_ = 0L;
16751       numberOfRows_ = 0;
16752       closeScanner_ = false;
16753       nextCallSeq_ = 0L;
16754       clientHandlesPartials_ = false;
16755       clientHandlesHeartbeats_ = false;
16756     }
16757     private byte memoizedIsInitialized = -1; // -1 = not yet computed, 0 = false, 1 = true
          // A ScanRequest is initialized iff its optional region and scan
          // sub-messages, when present, are themselves initialized. The result is
          // cached in memoizedIsInitialized; the cache is read into a local first
          // so a concurrent writer cannot be observed mid-check.
16758     public final boolean isInitialized() {
16759       byte isInitialized = memoizedIsInitialized;
16760       if (isInitialized != -1) return isInitialized == 1; // cached answer
16761
16762       if (hasRegion()) {
16763         if (!getRegion().isInitialized()) {
16764           memoizedIsInitialized = 0;
16765           return false;
16766         }
16767       }
16768       if (hasScan()) {
16769         if (!getScan().isInitialized()) {
16770           memoizedIsInitialized = 0;
16771           return false;
16772         }
16773       }
16774       memoizedIsInitialized = 1;
16775       return true;
16776     }
16777 
          // Serializes this message to the stream: each field is written in
          // ascending field-number order (1..8) only when its presence bit in
          // bitField0_ is set, then any unknown fields are appended.
          // getSerializedSize() is called first so sizes are computed and cached
          // before writing (its return value is deliberately discarded).
16778     public void writeTo(com.google.protobuf.CodedOutputStream output)
16779                         throws java.io.IOException {
16780       getSerializedSize();
16781       if (((bitField0_ & 0x00000001) == 0x00000001)) {
16782         output.writeMessage(1, region_);
16783       }
16784       if (((bitField0_ & 0x00000002) == 0x00000002)) {
16785         output.writeMessage(2, scan_);
16786       }
16787       if (((bitField0_ & 0x00000004) == 0x00000004)) {
16788         output.writeUInt64(3, scannerId_);
16789       }
16790       if (((bitField0_ & 0x00000008) == 0x00000008)) {
16791         output.writeUInt32(4, numberOfRows_);
16792       }
16793       if (((bitField0_ & 0x00000010) == 0x00000010)) {
16794         output.writeBool(5, closeScanner_);
16795       }
16796       if (((bitField0_ & 0x00000020) == 0x00000020)) {
16797         output.writeUInt64(6, nextCallSeq_);
16798       }
16799       if (((bitField0_ & 0x00000040) == 0x00000040)) {
16800         output.writeBool(7, clientHandlesPartials_);
16801       }
16802       if (((bitField0_ & 0x00000080) == 0x00000080)) {
16803         output.writeBool(8, clientHandlesHeartbeats_);
16804       }
16805       getUnknownFields().writeTo(output);
16806     }
16807 
16808     private int memoizedSerializedSize = -1; // -1 = not yet computed
          // Computes the serialized byte size by summing the encoded size of each
          // field whose presence bit is set, plus the unknown-field set. The total
          // is cached in memoizedSerializedSize; recomputation is idempotent, so
          // the unsynchronized cache is at worst recomputed by racing threads.
16809     public int getSerializedSize() {
16810       int size = memoizedSerializedSize;
16811       if (size != -1) return size; // cached answer
16812
16813       size = 0;
16814       if (((bitField0_ & 0x00000001) == 0x00000001)) {
16815         size += com.google.protobuf.CodedOutputStream
16816           .computeMessageSize(1, region_);
16817       }
16818       if (((bitField0_ & 0x00000002) == 0x00000002)) {
16819         size += com.google.protobuf.CodedOutputStream
16820           .computeMessageSize(2, scan_);
16821       }
16822       if (((bitField0_ & 0x00000004) == 0x00000004)) {
16823         size += com.google.protobuf.CodedOutputStream
16824           .computeUInt64Size(3, scannerId_);
16825       }
16826       if (((bitField0_ & 0x00000008) == 0x00000008)) {
16827         size += com.google.protobuf.CodedOutputStream
16828           .computeUInt32Size(4, numberOfRows_);
16829       }
16830       if (((bitField0_ & 0x00000010) == 0x00000010)) {
16831         size += com.google.protobuf.CodedOutputStream
16832           .computeBoolSize(5, closeScanner_);
16833       }
16834       if (((bitField0_ & 0x00000020) == 0x00000020)) {
16835         size += com.google.protobuf.CodedOutputStream
16836           .computeUInt64Size(6, nextCallSeq_);
16837       }
16838       if (((bitField0_ & 0x00000040) == 0x00000040)) {
16839         size += com.google.protobuf.CodedOutputStream
16840           .computeBoolSize(7, clientHandlesPartials_);
16841       }
16842       if (((bitField0_ & 0x00000080) == 0x00000080)) {
16843         size += com.google.protobuf.CodedOutputStream
16844           .computeBoolSize(8, clientHandlesHeartbeats_);
16845       }
16846       size += getUnknownFields().getSerializedSize();
16847       memoizedSerializedSize = size;
16848       return size;
16849     }
16850 
16851     private static final long serialVersionUID = 0L;
          // Java-serialization hook; simply defers to the superclass
          // (com.google.protobuf.GeneratedMessage) replacement behavior.
16852     @java.lang.Override
16853     protected java.lang.Object writeReplace()
16854         throws java.io.ObjectStreamException {
16855       return super.writeReplace();
16856     }
16857 
          // Structural equality: two ScanRequests are equal iff every field has
          // the same presence flag and, when present, the same value, and their
          // unknown-field sets are equal. Non-ScanRequest arguments fall back to
          // super.equals (identity for generated messages).
16858     @java.lang.Override
16859     public boolean equals(final java.lang.Object obj) {
16860       if (obj == this) {
16861        return true;
16862       }
16863       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) {
16864         return super.equals(obj);
16865       }
16866       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj;
16867
16868       boolean result = true;
16869       result = result && (hasRegion() == other.hasRegion());
16870       if (hasRegion()) {
16871         result = result && getRegion()
16872             .equals(other.getRegion());
16873       }
16874       result = result && (hasScan() == other.hasScan());
16875       if (hasScan()) {
16876         result = result && getScan()
16877             .equals(other.getScan());
16878       }
16879       result = result && (hasScannerId() == other.hasScannerId());
16880       if (hasScannerId()) {
16881         result = result && (getScannerId()
16882             == other.getScannerId());
16883       }
16884       result = result && (hasNumberOfRows() == other.hasNumberOfRows());
16885       if (hasNumberOfRows()) {
16886         result = result && (getNumberOfRows()
16887             == other.getNumberOfRows());
16888       }
16889       result = result && (hasCloseScanner() == other.hasCloseScanner());
16890       if (hasCloseScanner()) {
16891         result = result && (getCloseScanner()
16892             == other.getCloseScanner());
16893       }
16894       result = result && (hasNextCallSeq() == other.hasNextCallSeq());
16895       if (hasNextCallSeq()) {
16896         result = result && (getNextCallSeq()
16897             == other.getNextCallSeq());
16898       }
16899       result = result && (hasClientHandlesPartials() == other.hasClientHandlesPartials());
16900       if (hasClientHandlesPartials()) {
16901         result = result && (getClientHandlesPartials()
16902             == other.getClientHandlesPartials());
16903       }
16904       result = result && (hasClientHandlesHeartbeats() == other.hasClientHandlesHeartbeats());
16905       if (hasClientHandlesHeartbeats()) {
16906         result = result && (getClientHandlesHeartbeats()
16907             == other.getClientHandlesHeartbeats());
16908       }
16909       result = result &&
16910           getUnknownFields().equals(other.getUnknownFields());
16911       return result;
16912     }
16913 
16914     private int memoizedHashCode = 0; // 0 = not yet computed (consistent with equals above)
          // Hash consistent with equals: mixes the descriptor hash and, for each
          // present field, its field number and value hash using the generated
          // 19/37/53/29 prime scheme. Cached in memoizedHashCode; a hash that
          // happens to equal 0 is simply recomputed on each call.
16915     @java.lang.Override
16916     public int hashCode() {
16917       if (memoizedHashCode != 0) {
16918         return memoizedHashCode;
16919       }
16920       int hash = 41;
16921       hash = (19 * hash) + getDescriptorForType().hashCode();
16922       if (hasRegion()) {
16923         hash = (37 * hash) + REGION_FIELD_NUMBER;
16924         hash = (53 * hash) + getRegion().hashCode();
16925       }
16926       if (hasScan()) {
16927         hash = (37 * hash) + SCAN_FIELD_NUMBER;
16928         hash = (53 * hash) + getScan().hashCode();
16929       }
16930       if (hasScannerId()) {
16931         hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
16932         hash = (53 * hash) + hashLong(getScannerId());
16933       }
16934       if (hasNumberOfRows()) {
16935         hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER;
16936         hash = (53 * hash) + getNumberOfRows();
16937       }
16938       if (hasCloseScanner()) {
16939         hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER;
16940         hash = (53 * hash) + hashBoolean(getCloseScanner());
16941       }
16942       if (hasNextCallSeq()) {
16943         hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER;
16944         hash = (53 * hash) + hashLong(getNextCallSeq());
16945       }
16946       if (hasClientHandlesPartials()) {
16947         hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER;
16948         hash = (53 * hash) + hashBoolean(getClientHandlesPartials());
16949       }
16950       if (hasClientHandlesHeartbeats()) {
16951         hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER;
16952         hash = (53 * hash) + hashBoolean(getClientHandlesHeartbeats());
16953       }
16954       hash = (29 * hash) + getUnknownFields().hashCode();
16955       memoizedHashCode = hash;
16956       return hash;
16957     }
16958 
          // Static parsing entry points; all delegate to the generated PARSER.
          // ByteString/byte[] overloads throw InvalidProtocolBufferException on
          // malformed input; stream overloads throw IOException. The *Delimited*
          // variants delegate to PARSER.parseDelimitedFrom, i.e. protobuf's
          // length-prefixed framing for multiple messages on one stream.
16959     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16960         com.google.protobuf.ByteString data)
16961         throws com.google.protobuf.InvalidProtocolBufferException {
16962       return PARSER.parseFrom(data);
16963     }
16964     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16965         com.google.protobuf.ByteString data,
16966         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16967         throws com.google.protobuf.InvalidProtocolBufferException {
16968       return PARSER.parseFrom(data, extensionRegistry);
16969     }
16970     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data)
16971         throws com.google.protobuf.InvalidProtocolBufferException {
16972       return PARSER.parseFrom(data);
16973     }
16974     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16975         byte[] data,
16976         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16977         throws com.google.protobuf.InvalidProtocolBufferException {
16978       return PARSER.parseFrom(data, extensionRegistry);
16979     }
16980     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input)
16981         throws java.io.IOException {
16982       return PARSER.parseFrom(input);
16983     }
16984     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
16985         java.io.InputStream input,
16986         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16987         throws java.io.IOException {
16988       return PARSER.parseFrom(input, extensionRegistry);
16989     }
16990     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input)
16991         throws java.io.IOException {
16992       return PARSER.parseDelimitedFrom(input);
16993     }
16994     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(
16995         java.io.InputStream input,
16996         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16997         throws java.io.IOException {
16998       return PARSER.parseDelimitedFrom(input, extensionRegistry);
16999     }
17000     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17001         com.google.protobuf.CodedInputStream input)
17002         throws java.io.IOException {
17003       return PARSER.parseFrom(input);
17004     }
17005     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17006         com.google.protobuf.CodedInputStream input,
17007         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17008         throws java.io.IOException {
17009       return PARSER.parseFrom(input, extensionRegistry);
17010     }
17011 
          // Builder factories: newBuilder() starts from defaults; the prototype
          // overload and toBuilder() start pre-populated by merging this message's
          // set fields into a fresh builder.
17012     public static Builder newBuilder() { return Builder.create(); }
17013     public Builder newBuilderForType() { return newBuilder(); }
17014     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) {
17015       return newBuilder().mergeFrom(prototype); // copy of prototype's set fields
17016     }
17017     public Builder toBuilder() { return newBuilder(this); }
17018 
          // Framework hook: creates a Builder wired to the given parent so nested
          // builder changes propagate (used internally by GeneratedMessage).
17019     @java.lang.Override
17020     protected Builder newBuilderForType(
17021         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17022       Builder builder = new Builder(parent);
17023       return builder;
17024     }
17025     /**
17026      * Protobuf type {@code ScanRequest}
17027      *
17028      * <pre>
17029      **
17030      * A scan request. Initially, it should specify a scan. Later on, you
17031      * can use the scanner id returned to fetch result batches with a different
17032      * scan request.
17033      *
17034      * The scanner will remain open if there are more results, and it's not
17035      * asked to be closed explicitly.
17036      *
17037      * You can fetch the results and ask the scanner to be closed to save
17038      * a trip if you are not interested in remaining results.
17039      * </pre>
17040      */
17041     public static final class Builder extends
17042         com.google.protobuf.GeneratedMessage.Builder<Builder>
17043        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder {
17044       public static final com.google.protobuf.Descriptors.Descriptor
17045           getDescriptor() {
17046         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
17047       }
17048 
17049       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17050           internalGetFieldAccessorTable() {
17051         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable
17052             .ensureFieldAccessorsInitialized(
17053                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
17054       }
17055 
17056       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder()
17057       private Builder() {
17058         maybeForceBuilderInitialization();
17059       }
17060 
17061       private Builder(
17062           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17063         super(parent);
17064         maybeForceBuilderInitialization();
17065       }
17066       private void maybeForceBuilderInitialization() {
17067         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17068           getRegionFieldBuilder();
17069           getScanFieldBuilder();
17070         }
17071       }
17072       private static Builder create() {
17073         return new Builder();
17074       }
17075 
17076       public Builder clear() {
17077         super.clear();
17078         if (regionBuilder_ == null) {
17079           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17080         } else {
17081           regionBuilder_.clear();
17082         }
17083         bitField0_ = (bitField0_ & ~0x00000001);
17084         if (scanBuilder_ == null) {
17085           scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17086         } else {
17087           scanBuilder_.clear();
17088         }
17089         bitField0_ = (bitField0_ & ~0x00000002);
17090         scannerId_ = 0L;
17091         bitField0_ = (bitField0_ & ~0x00000004);
17092         numberOfRows_ = 0;
17093         bitField0_ = (bitField0_ & ~0x00000008);
17094         closeScanner_ = false;
17095         bitField0_ = (bitField0_ & ~0x00000010);
17096         nextCallSeq_ = 0L;
17097         bitField0_ = (bitField0_ & ~0x00000020);
17098         clientHandlesPartials_ = false;
17099         bitField0_ = (bitField0_ & ~0x00000040);
17100         clientHandlesHeartbeats_ = false;
17101         bitField0_ = (bitField0_ & ~0x00000080);
17102         return this;
17103       }
17104 
17105       public Builder clone() {
17106         return create().mergeFrom(buildPartial());
17107       }
17108 
17109       public com.google.protobuf.Descriptors.Descriptor
17110           getDescriptorForType() {
17111         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
17112       }
17113 
17114       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() {
17115         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
17116       }
17117 
17118       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() {
17119         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial();
17120         if (!result.isInitialized()) {
17121           throw newUninitializedMessageException(result);
17122         }
17123         return result;
17124       }
17125 
17126       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() {
17127         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this);
17128         int from_bitField0_ = bitField0_;
17129         int to_bitField0_ = 0;
17130         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
17131           to_bitField0_ |= 0x00000001;
17132         }
17133         if (regionBuilder_ == null) {
17134           result.region_ = region_;
17135         } else {
17136           result.region_ = regionBuilder_.build();
17137         }
17138         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
17139           to_bitField0_ |= 0x00000002;
17140         }
17141         if (scanBuilder_ == null) {
17142           result.scan_ = scan_;
17143         } else {
17144           result.scan_ = scanBuilder_.build();
17145         }
17146         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
17147           to_bitField0_ |= 0x00000004;
17148         }
17149         result.scannerId_ = scannerId_;
17150         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
17151           to_bitField0_ |= 0x00000008;
17152         }
17153         result.numberOfRows_ = numberOfRows_;
17154         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
17155           to_bitField0_ |= 0x00000010;
17156         }
17157         result.closeScanner_ = closeScanner_;
17158         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
17159           to_bitField0_ |= 0x00000020;
17160         }
17161         result.nextCallSeq_ = nextCallSeq_;
17162         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
17163           to_bitField0_ |= 0x00000040;
17164         }
17165         result.clientHandlesPartials_ = clientHandlesPartials_;
17166         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
17167           to_bitField0_ |= 0x00000080;
17168         }
17169         result.clientHandlesHeartbeats_ = clientHandlesHeartbeats_;
17170         result.bitField0_ = to_bitField0_;
17171         onBuilt();
17172         return result;
17173       }
17174 
17175       public Builder mergeFrom(com.google.protobuf.Message other) {
17176         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) {
17177           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other);
17178         } else {
17179           super.mergeFrom(other);
17180           return this;
17181         }
17182       }
17183 
17184       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) {
17185         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this;
17186         if (other.hasRegion()) {
17187           mergeRegion(other.getRegion());
17188         }
17189         if (other.hasScan()) {
17190           mergeScan(other.getScan());
17191         }
17192         if (other.hasScannerId()) {
17193           setScannerId(other.getScannerId());
17194         }
17195         if (other.hasNumberOfRows()) {
17196           setNumberOfRows(other.getNumberOfRows());
17197         }
17198         if (other.hasCloseScanner()) {
17199           setCloseScanner(other.getCloseScanner());
17200         }
17201         if (other.hasNextCallSeq()) {
17202           setNextCallSeq(other.getNextCallSeq());
17203         }
17204         if (other.hasClientHandlesPartials()) {
17205           setClientHandlesPartials(other.getClientHandlesPartials());
17206         }
17207         if (other.hasClientHandlesHeartbeats()) {
17208           setClientHandlesHeartbeats(other.getClientHandlesHeartbeats());
17209         }
17210         this.mergeUnknownFields(other.getUnknownFields());
17211         return this;
17212       }
17213 
17214       public final boolean isInitialized() {
17215         if (hasRegion()) {
17216           if (!getRegion().isInitialized()) {
17217             
17218             return false;
17219           }
17220         }
17221         if (hasScan()) {
17222           if (!getScan().isInitialized()) {
17223             
17224             return false;
17225           }
17226         }
17227         return true;
17228       }
17229 
17230       public Builder mergeFrom(
17231           com.google.protobuf.CodedInputStream input,
17232           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17233           throws java.io.IOException {
17234         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null;
17235         try {
17236           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17237         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17238           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage();
17239           throw e;
17240         } finally {
17241           if (parsedMessage != null) {
17242             mergeFrom(parsedMessage);
17243           }
17244         }
17245         return this;
17246       }
17247       private int bitField0_;
17248 
17249       // optional .RegionSpecifier region = 1;
17250       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17251       private com.google.protobuf.SingleFieldBuilder<
17252           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
17253       /**
17254        * <code>optional .RegionSpecifier region = 1;</code>
17255        */
17256       public boolean hasRegion() {
17257         return ((bitField0_ & 0x00000001) == 0x00000001);
17258       }
17259       /**
17260        * <code>optional .RegionSpecifier region = 1;</code>
17261        */
17262       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
17263         if (regionBuilder_ == null) {
17264           return region_;
17265         } else {
17266           return regionBuilder_.getMessage();
17267         }
17268       }
17269       /**
17270        * <code>optional .RegionSpecifier region = 1;</code>
17271        */
17272       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
17273         if (regionBuilder_ == null) {
17274           if (value == null) {
17275             throw new NullPointerException();
17276           }
17277           region_ = value;
17278           onChanged();
17279         } else {
17280           regionBuilder_.setMessage(value);
17281         }
17282         bitField0_ |= 0x00000001;
17283         return this;
17284       }
17285       /**
17286        * <code>optional .RegionSpecifier region = 1;</code>
17287        */
17288       public Builder setRegion(
17289           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
17290         if (regionBuilder_ == null) {
17291           region_ = builderForValue.build();
17292           onChanged();
17293         } else {
17294           regionBuilder_.setMessage(builderForValue.build());
17295         }
17296         bitField0_ |= 0x00000001;
17297         return this;
17298       }
17299       /**
17300        * <code>optional .RegionSpecifier region = 1;</code>
17301        */
17302       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
17303         if (regionBuilder_ == null) {
17304           if (((bitField0_ & 0x00000001) == 0x00000001) &&
17305               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
17306             region_ =
17307               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
17308           } else {
17309             region_ = value;
17310           }
17311           onChanged();
17312         } else {
17313           regionBuilder_.mergeFrom(value);
17314         }
17315         bitField0_ |= 0x00000001;
17316         return this;
17317       }
17318       /**
17319        * <code>optional .RegionSpecifier region = 1;</code>
17320        */
17321       public Builder clearRegion() {
17322         if (regionBuilder_ == null) {
17323           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17324           onChanged();
17325         } else {
17326           regionBuilder_.clear();
17327         }
17328         bitField0_ = (bitField0_ & ~0x00000001);
17329         return this;
17330       }
17331       /**
17332        * <code>optional .RegionSpecifier region = 1;</code>
17333        */
17334       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
17335         bitField0_ |= 0x00000001;
17336         onChanged();
17337         return getRegionFieldBuilder().getBuilder();
17338       }
17339       /**
17340        * <code>optional .RegionSpecifier region = 1;</code>
17341        */
17342       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
17343         if (regionBuilder_ != null) {
17344           return regionBuilder_.getMessageOrBuilder();
17345         } else {
17346           return region_;
17347         }
17348       }
17349       /**
17350        * <code>optional .RegionSpecifier region = 1;</code>
17351        */
17352       private com.google.protobuf.SingleFieldBuilder<
17353           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
17354           getRegionFieldBuilder() {
17355         if (regionBuilder_ == null) {
17356           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
17357               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
17358                   region_,
17359                   getParentForChildren(),
17360                   isClean());
17361           region_ = null;
17362         }
17363         return regionBuilder_;
17364       }
17365 
17366       // optional .Scan scan = 2;
17367       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17368       private com.google.protobuf.SingleFieldBuilder<
17369           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
17370       /**
17371        * <code>optional .Scan scan = 2;</code>
17372        */
17373       public boolean hasScan() {
17374         return ((bitField0_ & 0x00000002) == 0x00000002);
17375       }
17376       /**
17377        * <code>optional .Scan scan = 2;</code>
17378        */
17379       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
17380         if (scanBuilder_ == null) {
17381           return scan_;
17382         } else {
17383           return scanBuilder_.getMessage();
17384         }
17385       }
17386       /**
17387        * <code>optional .Scan scan = 2;</code>
17388        */
17389       public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
17390         if (scanBuilder_ == null) {
17391           if (value == null) {
17392             throw new NullPointerException();
17393           }
17394           scan_ = value;
17395           onChanged();
17396         } else {
17397           scanBuilder_.setMessage(value);
17398         }
17399         bitField0_ |= 0x00000002;
17400         return this;
17401       }
17402       /**
17403        * <code>optional .Scan scan = 2;</code>
17404        */
17405       public Builder setScan(
17406           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
17407         if (scanBuilder_ == null) {
17408           scan_ = builderForValue.build();
17409           onChanged();
17410         } else {
17411           scanBuilder_.setMessage(builderForValue.build());
17412         }
17413         bitField0_ |= 0x00000002;
17414         return this;
17415       }
17416       /**
17417        * <code>optional .Scan scan = 2;</code>
17418        */
17419       public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
17420         if (scanBuilder_ == null) {
17421           if (((bitField0_ & 0x00000002) == 0x00000002) &&
17422               scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
17423             scan_ =
17424               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
17425           } else {
17426             scan_ = value;
17427           }
17428           onChanged();
17429         } else {
17430           scanBuilder_.mergeFrom(value);
17431         }
17432         bitField0_ |= 0x00000002;
17433         return this;
17434       }
17435       /**
17436        * <code>optional .Scan scan = 2;</code>
17437        */
17438       public Builder clearScan() {
17439         if (scanBuilder_ == null) {
17440           scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17441           onChanged();
17442         } else {
17443           scanBuilder_.clear();
17444         }
17445         bitField0_ = (bitField0_ & ~0x00000002);
17446         return this;
17447       }
17448       /**
17449        * <code>optional .Scan scan = 2;</code>
17450        */
17451       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
17452         bitField0_ |= 0x00000002;
17453         onChanged();
17454         return getScanFieldBuilder().getBuilder();
17455       }
17456       /**
17457        * <code>optional .Scan scan = 2;</code>
17458        */
17459       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
17460         if (scanBuilder_ != null) {
17461           return scanBuilder_.getMessageOrBuilder();
17462         } else {
17463           return scan_;
17464         }
17465       }
      /**
       * <code>optional .Scan scan = 2;</code>
       *
       * Lazily creates the SingleFieldBuilder for the scan field.  Once the
       * builder exists it owns the message, so the plain field is nulled out
       * to avoid keeping two live copies.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> 
          getScanFieldBuilder() {
        if (scanBuilder_ == null) {
          scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
                  scan_,
                  getParentForChildren(),
                  isClean());
          scan_ = null;  // ownership transferred to the builder
        }
        return scanBuilder_;
      }
17482 
17483       // optional uint64 scanner_id = 3;
17484       private long scannerId_ ;
17485       /**
17486        * <code>optional uint64 scanner_id = 3;</code>
17487        */
17488       public boolean hasScannerId() {
17489         return ((bitField0_ & 0x00000004) == 0x00000004);
17490       }
17491       /**
17492        * <code>optional uint64 scanner_id = 3;</code>
17493        */
17494       public long getScannerId() {
17495         return scannerId_;
17496       }
17497       /**
17498        * <code>optional uint64 scanner_id = 3;</code>
17499        */
17500       public Builder setScannerId(long value) {
17501         bitField0_ |= 0x00000004;
17502         scannerId_ = value;
17503         onChanged();
17504         return this;
17505       }
17506       /**
17507        * <code>optional uint64 scanner_id = 3;</code>
17508        */
17509       public Builder clearScannerId() {
17510         bitField0_ = (bitField0_ & ~0x00000004);
17511         scannerId_ = 0L;
17512         onChanged();
17513         return this;
17514       }
17515 
17516       // optional uint32 number_of_rows = 4;
17517       private int numberOfRows_ ;
17518       /**
17519        * <code>optional uint32 number_of_rows = 4;</code>
17520        */
17521       public boolean hasNumberOfRows() {
17522         return ((bitField0_ & 0x00000008) == 0x00000008);
17523       }
17524       /**
17525        * <code>optional uint32 number_of_rows = 4;</code>
17526        */
17527       public int getNumberOfRows() {
17528         return numberOfRows_;
17529       }
17530       /**
17531        * <code>optional uint32 number_of_rows = 4;</code>
17532        */
17533       public Builder setNumberOfRows(int value) {
17534         bitField0_ |= 0x00000008;
17535         numberOfRows_ = value;
17536         onChanged();
17537         return this;
17538       }
17539       /**
17540        * <code>optional uint32 number_of_rows = 4;</code>
17541        */
17542       public Builder clearNumberOfRows() {
17543         bitField0_ = (bitField0_ & ~0x00000008);
17544         numberOfRows_ = 0;
17545         onChanged();
17546         return this;
17547       }
17548 
17549       // optional bool close_scanner = 5;
17550       private boolean closeScanner_ ;
17551       /**
17552        * <code>optional bool close_scanner = 5;</code>
17553        */
17554       public boolean hasCloseScanner() {
17555         return ((bitField0_ & 0x00000010) == 0x00000010);
17556       }
17557       /**
17558        * <code>optional bool close_scanner = 5;</code>
17559        */
17560       public boolean getCloseScanner() {
17561         return closeScanner_;
17562       }
17563       /**
17564        * <code>optional bool close_scanner = 5;</code>
17565        */
17566       public Builder setCloseScanner(boolean value) {
17567         bitField0_ |= 0x00000010;
17568         closeScanner_ = value;
17569         onChanged();
17570         return this;
17571       }
17572       /**
17573        * <code>optional bool close_scanner = 5;</code>
17574        */
17575       public Builder clearCloseScanner() {
17576         bitField0_ = (bitField0_ & ~0x00000010);
17577         closeScanner_ = false;
17578         onChanged();
17579         return this;
17580       }
17581 
17582       // optional uint64 next_call_seq = 6;
17583       private long nextCallSeq_ ;
17584       /**
17585        * <code>optional uint64 next_call_seq = 6;</code>
17586        */
17587       public boolean hasNextCallSeq() {
17588         return ((bitField0_ & 0x00000020) == 0x00000020);
17589       }
17590       /**
17591        * <code>optional uint64 next_call_seq = 6;</code>
17592        */
17593       public long getNextCallSeq() {
17594         return nextCallSeq_;
17595       }
17596       /**
17597        * <code>optional uint64 next_call_seq = 6;</code>
17598        */
17599       public Builder setNextCallSeq(long value) {
17600         bitField0_ |= 0x00000020;
17601         nextCallSeq_ = value;
17602         onChanged();
17603         return this;
17604       }
17605       /**
17606        * <code>optional uint64 next_call_seq = 6;</code>
17607        */
17608       public Builder clearNextCallSeq() {
17609         bitField0_ = (bitField0_ & ~0x00000020);
17610         nextCallSeq_ = 0L;
17611         onChanged();
17612         return this;
17613       }
17614 
17615       // optional bool client_handles_partials = 7;
17616       private boolean clientHandlesPartials_ ;
17617       /**
17618        * <code>optional bool client_handles_partials = 7;</code>
17619        */
17620       public boolean hasClientHandlesPartials() {
17621         return ((bitField0_ & 0x00000040) == 0x00000040);
17622       }
17623       /**
17624        * <code>optional bool client_handles_partials = 7;</code>
17625        */
17626       public boolean getClientHandlesPartials() {
17627         return clientHandlesPartials_;
17628       }
17629       /**
17630        * <code>optional bool client_handles_partials = 7;</code>
17631        */
17632       public Builder setClientHandlesPartials(boolean value) {
17633         bitField0_ |= 0x00000040;
17634         clientHandlesPartials_ = value;
17635         onChanged();
17636         return this;
17637       }
17638       /**
17639        * <code>optional bool client_handles_partials = 7;</code>
17640        */
17641       public Builder clearClientHandlesPartials() {
17642         bitField0_ = (bitField0_ & ~0x00000040);
17643         clientHandlesPartials_ = false;
17644         onChanged();
17645         return this;
17646       }
17647 
17648       // optional bool client_handles_heartbeats = 8;
17649       private boolean clientHandlesHeartbeats_ ;
17650       /**
17651        * <code>optional bool client_handles_heartbeats = 8;</code>
17652        */
17653       public boolean hasClientHandlesHeartbeats() {
17654         return ((bitField0_ & 0x00000080) == 0x00000080);
17655       }
17656       /**
17657        * <code>optional bool client_handles_heartbeats = 8;</code>
17658        */
17659       public boolean getClientHandlesHeartbeats() {
17660         return clientHandlesHeartbeats_;
17661       }
17662       /**
17663        * <code>optional bool client_handles_heartbeats = 8;</code>
17664        */
17665       public Builder setClientHandlesHeartbeats(boolean value) {
17666         bitField0_ |= 0x00000080;
17667         clientHandlesHeartbeats_ = value;
17668         onChanged();
17669         return this;
17670       }
17671       /**
17672        * <code>optional bool client_handles_heartbeats = 8;</code>
17673        */
17674       public Builder clearClientHandlesHeartbeats() {
17675         bitField0_ = (bitField0_ & ~0x00000080);
17676         clientHandlesHeartbeats_ = false;
17677         onChanged();
17678         return this;
17679       }
17680 
17681       // @@protoc_insertion_point(builder_scope:ScanRequest)
17682     }
17683 
    static {
      // Build and initialize the shared default instance; initFields() must
      // run after construction so default field values are populated.
      defaultInstance = new ScanRequest(true);
      defaultInstance.initFields();
    }
17688 
17689     // @@protoc_insertion_point(class_scope:ScanRequest)
17690   }
17691 
  /**
   * Read-only accessor contract for {@code ScanResponse}, implemented by both
   * the immutable message class and its builder.
   */
  public interface ScanResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated uint32 cells_per_result = 1;
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks.  A cellblock is made up
     * of all Cells serialized out as one cellblock BUT responses from a server
     * have their Cells grouped by Result.  So we can reconstitute the
     * Results on the client-side, this field is a list of counts of Cells
     * in each Result that makes up the response.  For example, if this field
     * has 3, 3, 3 in it, then we know that on the client, we are to make
     * three Results each of three Cells each.
     * </pre>
     */
    java.util.List<java.lang.Integer> getCellsPerResultList();
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * @return the number of cells_per_result entries; see
     *         {@link #getCellsPerResultList} for the field's semantics
     */
    int getCellsPerResultCount();
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * @return the Cell count for the Result at {@code index}; see
     *         {@link #getCellsPerResultList} for the field's semantics
     */
    int getCellsPerResult(int index);

    // optional uint64 scanner_id = 2;
    /**
     * <code>optional uint64 scanner_id = 2;</code>
     */
    boolean hasScannerId();
    /**
     * <code>optional uint64 scanner_id = 2;</code>
     */
    long getScannerId();

    // optional bool more_results = 3;
    /**
     * <code>optional bool more_results = 3;</code>
     */
    boolean hasMoreResults();
    /**
     * <code>optional bool more_results = 3;</code>
     */
    boolean getMoreResults();

    // optional uint32 ttl = 4;
    /**
     * <code>optional uint32 ttl = 4;</code>
     */
    boolean hasTtl();
    /**
     * <code>optional uint32 ttl = 4;</code>
     */
    int getTtl();

    // repeated .Result results = 5;
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> 
        getResultsList();
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return the Result at {@code index}; see {@link #getResultsList}
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index);
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return the number of Results; see {@link #getResultsList}
     */
    int getResultsCount();
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return read-only views of the Results; see {@link #getResultsList}
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
        getResultsOrBuilderList();
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return a read-only view of the Result at {@code index}; see
     *         {@link #getResultsList}
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
        int index);

    // optional bool stale = 6;
    /**
     * <code>optional bool stale = 6;</code>
     */
    boolean hasStale();
    /**
     * <code>optional bool stale = 6;</code>
     */
    boolean getStale();

    // repeated bool partial_flag_per_result = 7;
    /**
     * <code>repeated bool partial_flag_per_result = 7;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. In the event that a row
     * could not fit all of its cells into a single RPC chunk, the results will be
     * returned as partials, and reconstructed into a complete result on the client
     * side. This field is a list of flags indicating whether or not the result
     * that the cells belong to is a partial result. For example, if this field
     * has false, false, true in it, then we know that on the client side, we need to
     * make another RPC request since the last result was only a partial.
     * </pre>
     */
    java.util.List<java.lang.Boolean> getPartialFlagPerResultList();
    /**
     * <code>repeated bool partial_flag_per_result = 7;</code>
     *
     * @return the number of partial flags; see
     *         {@link #getPartialFlagPerResultList} for the field's semantics
     */
    int getPartialFlagPerResultCount();
    /**
     * <code>repeated bool partial_flag_per_result = 7;</code>
     *
     * @return whether the Result at {@code index} is partial; see
     *         {@link #getPartialFlagPerResultList} for the field's semantics
     */
    boolean getPartialFlagPerResult(int index);

    // optional bool more_results_in_region = 8;
    /**
     * <code>optional bool more_results_in_region = 8;</code>
     *
     * <pre>
     * A server may choose to limit the number of results returned to the client for
     * reasons such as the size in bytes or quantity of results accumulated. This field
     * will true when more results exist in the current region.
     * </pre>
     */
    boolean hasMoreResultsInRegion();
    /**
     * <code>optional bool more_results_in_region = 8;</code>
     *
     * @return the flag value; see {@link #hasMoreResultsInRegion} for semantics
     */
    boolean getMoreResultsInRegion();

    // optional bool heartbeat_message = 9;
    /**
     * <code>optional bool heartbeat_message = 9;</code>
     *
     * <pre>
     * This field is filled in if the server is sending back a heartbeat message.
     * Heartbeat messages are sent back to the client to prevent the scanner from
     * timing out. Seeing a heartbeat message communicates to the Client that the
     * server would have continued to scan had the time limit not been reached.
     * </pre>
     */
    boolean hasHeartbeatMessage();
    /**
     * <code>optional bool heartbeat_message = 9;</code>
     *
     * @return the flag value; see {@link #hasHeartbeatMessage} for semantics
     */
    boolean getHeartbeatMessage();
  }
17924   /**
17925    * Protobuf type {@code ScanResponse}
17926    *
17927    * <pre>
17928    **
17929    * The scan response. If there are no more results, more_results will
17930    * be false.  If it is not specified, it means there are more.
17931    * </pre>
17932    */
17933   public static final class ScanResponse extends
17934       com.google.protobuf.GeneratedMessage
17935       implements ScanResponseOrBuilder {
    // Use ScanResponse.newBuilder() to construct.
    private ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to construct the shared default instance (no field init here).
    private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17942 
    // Shared immutable instance with every field at its default value.
    private static final ScanResponse defaultInstance;
    /** Returns the shared default (all-fields-unset) instance. */
    public static ScanResponse getDefaultInstance() {
      return defaultInstance;
    }

    /** Same as {@link #getDefaultInstance()}, typed for the generic message API. */
    public ScanResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
17951 
    // Fields seen on the wire that this schema version does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a ScanResponse from the wire format.  Reads tags until the
     * end-of-message marker, accumulating repeated fields into mutable lists
     * that are frozen in the finally block.  Unrecognized fields are preserved
     * in {@code unknownFields} rather than dropped.
     */
    private ScanResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;  // tracks which repeated-field lists have been allocated
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the message.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // cells_per_result, unpacked varint form (field 1).
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
                mutable_bitField0_ |= 0x00000001;
              }
              cellsPerResult_.add(input.readUInt32());
              break;
            }
            case 10: {
              // cells_per_result, packed length-delimited form (field 1).
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
                cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
                mutable_bitField0_ |= 0x00000001;
              }
              while (input.getBytesUntilLimit() > 0) {
                cellsPerResult_.add(input.readUInt32());
              }
              input.popLimit(limit);
              break;
            }
            case 16: {
              // scanner_id (field 2).
              bitField0_ |= 0x00000001;
              scannerId_ = input.readUInt64();
              break;
            }
            case 24: {
              // more_results (field 3).
              bitField0_ |= 0x00000002;
              moreResults_ = input.readBool();
              break;
            }
            case 32: {
              // ttl (field 4).
              bitField0_ |= 0x00000004;
              ttl_ = input.readUInt32();
              break;
            }
            case 42: {
              // results (field 5, embedded Result messages).
              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
                results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>();
                mutable_bitField0_ |= 0x00000010;
              }
              results_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry));
              break;
            }
            case 48: {
              // stale (field 6).
              bitField0_ |= 0x00000008;
              stale_ = input.readBool();
              break;
            }
            case 56: {
              // partial_flag_per_result, unpacked form (field 7).
              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
                partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
                mutable_bitField0_ |= 0x00000040;
              }
              partialFlagPerResult_.add(input.readBool());
              break;
            }
            case 58: {
              // partial_flag_per_result, packed form (field 7).
              int length = input.readRawVarint32();
              int limit = input.pushLimit(length);
              if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
                partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
                mutable_bitField0_ |= 0x00000040;
              }
              while (input.getBytesUntilLimit() > 0) {
                partialFlagPerResult_.add(input.readBool());
              }
              input.popLimit(limit);
              break;
            }
            case 64: {
              // more_results_in_region (field 8).
              bitField0_ |= 0x00000010;
              moreResultsInRegion_ = input.readBool();
              break;
            }
            case 72: {
              // heartbeat_message (field 9).
              bitField0_ |= 0x00000020;
              heartbeatMessage_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze any repeated-field lists populated above, even on error paths.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
        }
        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
          results_ = java.util.Collections.unmodifiableList(results_);
        }
        if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
          partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for the {@code ScanResponse} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Wires reflective field access to the message and builder classes.
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
    }
18093 
    /** Parser that deserializes {@code ScanResponse} messages from a stream. */
    public static com.google.protobuf.Parser<ScanResponse> PARSER =
        new com.google.protobuf.AbstractParser<ScanResponse>() {
      public ScanResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ScanResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ScanResponse> getParserForType() {
      return PARSER;
    }
18108 
    private int bitField0_;  // presence bits for the optional scalar fields
    // repeated uint32 cells_per_result = 1;
    public static final int CELLS_PER_RESULT_FIELD_NUMBER = 1;
    private java.util.List<java.lang.Integer> cellsPerResult_;
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks.  A cellblock is made up
     * of all Cells serialized out as one cellblock BUT responses from a server
     * have their Cells grouped by Result.  So we can reconstitute the
     * Results on the client-side, this field is a list of counts of Cells
     * in each Result that makes up the response.  For example, if this field
     * has 3, 3, 3 in it, then we know that on the client, we are to make
     * three Results each of three Cells each.
     * </pre>
     */
    public java.util.List<java.lang.Integer>
        getCellsPerResultList() {
      return cellsPerResult_;
    }
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * @return the number of entries; see {@link #getCellsPerResultList}
     */
    public int getCellsPerResultCount() {
      return cellsPerResult_.size();
    }
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * @return the Cell count at {@code index}; see {@link #getCellsPerResultList}
     */
    public int getCellsPerResult(int index) {
      return cellsPerResult_.get(index);
    }
18162 
    // optional uint64 scanner_id = 2;
    public static final int SCANNER_ID_FIELD_NUMBER = 2;
    private long scannerId_;
    /**
     * <code>optional uint64 scanner_id = 2;</code>
     *
     * @return whether scanner_id was present on the wire
     */
    public boolean hasScannerId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 scanner_id = 2;</code>
     */
    public long getScannerId() {
      return scannerId_;
    }
18178 
    // optional bool more_results = 3;
    public static final int MORE_RESULTS_FIELD_NUMBER = 3;
    private boolean moreResults_;
    /**
     * <code>optional bool more_results = 3;</code>
     *
     * @return whether more_results was present on the wire
     */
    public boolean hasMoreResults() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool more_results = 3;</code>
     */
    public boolean getMoreResults() {
      return moreResults_;
    }
18194 
    // optional uint32 ttl = 4;
    public static final int TTL_FIELD_NUMBER = 4;
    private int ttl_;
    /**
     * <code>optional uint32 ttl = 4;</code>
     *
     * @return whether ttl was present on the wire
     */
    public boolean hasTtl() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint32 ttl = 4;</code>
     */
    public int getTtl() {
      return ttl_;
    }
18210 
    // repeated .Result results = 5;
    public static final int RESULTS_FIELD_NUMBER = 5;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_;
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
      return results_;
    }
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return read-only views of the Results; see {@link #getResultsList}
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
        getResultsOrBuilderList() {
      return results_;
    }
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return the number of Results; see {@link #getResultsList}
     */
    public int getResultsCount() {
      return results_.size();
    }
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * @return the Result at {@code index}; see {@link #getResultsList}
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
      return results_.get(index);
    }
18263     /**
18264      * <code>repeated .Result results = 5;</code>
18265      *
18266      * <pre>
18267      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18268      * This field is mutually exclusive with cells_per_result (since the Cells will
18269      * be inside the pb'd Result)
18270      * </pre>
18271      */
18272     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
18273         int index) {
18274       return results_.get(index);
18275     }
18276 
18277     // optional bool stale = 6;
18278     public static final int STALE_FIELD_NUMBER = 6;
18279     private boolean stale_;
18280     /**
18281      * <code>optional bool stale = 6;</code>
18282      */
18283     public boolean hasStale() {
18284       return ((bitField0_ & 0x00000008) == 0x00000008);
18285     }
18286     /**
18287      * <code>optional bool stale = 6;</code>
18288      */
18289     public boolean getStale() {
18290       return stale_;
18291     }
18292 
    // repeated bool partial_flag_per_result = 7;
    public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7;
    private java.util.List<java.lang.Boolean> partialFlagPerResult_;
    /**
     * <code>repeated bool partial_flag_per_result = 7;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. In the event that a row
     * could not fit all of its cells into a single RPC chunk, the results will be
     * returned as partials, and reconstructed into a complete result on the client
     * side. This field is a list of flags indicating whether or not the result
     * that the cells belong to is a partial result. For example, if this field
     * has false, false, true in it, then we know that on the client side, we need to
     * make another RPC request since the last result was only a partial.
     * </pre>
     */
    public java.util.List<java.lang.Boolean>
        getPartialFlagPerResultList() {
      return partialFlagPerResult_;
    }
    /**
     * Number of elements in field 7; see {@link #getPartialFlagPerResultList()}.
     *
     * <code>repeated bool partial_flag_per_result = 7;</code>
     */
    public int getPartialFlagPerResultCount() {
      return partialFlagPerResult_.size();
    }
    /**
     * Element of field 7 at {@code index}; see {@link #getPartialFlagPerResultList()}.
     *
     * <code>repeated bool partial_flag_per_result = 7;</code>
     */
    public boolean getPartialFlagPerResult(int index) {
      return partialFlagPerResult_.get(index);
    }
18345 
18346     // optional bool more_results_in_region = 8;
18347     public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8;
18348     private boolean moreResultsInRegion_;
18349     /**
18350      * <code>optional bool more_results_in_region = 8;</code>
18351      *
18352      * <pre>
18353      * A server may choose to limit the number of results returned to the client for
18354      * reasons such as the size in bytes or quantity of results accumulated. This field
18355      * will true when more results exist in the current region.
18356      * </pre>
18357      */
18358     public boolean hasMoreResultsInRegion() {
18359       return ((bitField0_ & 0x00000010) == 0x00000010);
18360     }
18361     /**
18362      * <code>optional bool more_results_in_region = 8;</code>
18363      *
18364      * <pre>
18365      * A server may choose to limit the number of results returned to the client for
18366      * reasons such as the size in bytes or quantity of results accumulated. This field
18367      * will true when more results exist in the current region.
18368      * </pre>
18369      */
18370     public boolean getMoreResultsInRegion() {
18371       return moreResultsInRegion_;
18372     }
18373 
18374     // optional bool heartbeat_message = 9;
18375     public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9;
18376     private boolean heartbeatMessage_;
18377     /**
18378      * <code>optional bool heartbeat_message = 9;</code>
18379      *
18380      * <pre>
18381      * This field is filled in if the server is sending back a heartbeat message.
18382      * Heartbeat messages are sent back to the client to prevent the scanner from
18383      * timing out. Seeing a heartbeat message communicates to the Client that the
18384      * server would have continued to scan had the time limit not been reached.
18385      * </pre>
18386      */
18387     public boolean hasHeartbeatMessage() {
18388       return ((bitField0_ & 0x00000020) == 0x00000020);
18389     }
18390     /**
18391      * <code>optional bool heartbeat_message = 9;</code>
18392      *
18393      * <pre>
18394      * This field is filled in if the server is sending back a heartbeat message.
18395      * Heartbeat messages are sent back to the client to prevent the scanner from
18396      * timing out. Seeing a heartbeat message communicates to the Client that the
18397      * server would have continued to scan had the time limit not been reached.
18398      * </pre>
18399      */
18400     public boolean getHeartbeatMessage() {
18401       return heartbeatMessage_;
18402     }
18403 
    /** Resets every field to its default value (empty lists, 0, false). */
    private void initFields() {
      cellsPerResult_ = java.util.Collections.emptyList();
      scannerId_ = 0L;
      moreResults_ = false;
      ttl_ = 0;
      results_ = java.util.Collections.emptyList();
      stale_ = false;
      partialFlagPerResult_ = java.util.Collections.emptyList();
      moreResultsInRegion_ = false;
      heartbeatMessage_ = false;
    }
18415     private byte memoizedIsInitialized = -1;
18416     public final boolean isInitialized() {
18417       byte isInitialized = memoizedIsInitialized;
18418       if (isInitialized != -1) return isInitialized == 1;
18419 
18420       memoizedIsInitialized = 1;
18421       return true;
18422     }
18423 
    /**
     * Serializes the set fields to {@code output} in ascending field-number
     * order (cells_per_result=1 ... heartbeat_message=9).  Calls
     * getSerializedSize() first so the memoized size is populated.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < cellsPerResult_.size(); i++) {
        output.writeUInt32(1, cellsPerResult_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(2, scannerId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(3, moreResults_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(4, ttl_);
      }
      for (int i = 0; i < results_.size(); i++) {
        output.writeMessage(5, results_.get(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(6, stale_);
      }
      for (int i = 0; i < partialFlagPerResult_.size(); i++) {
        output.writeBool(7, partialFlagPerResult_.get(i));
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBool(8, moreResultsInRegion_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBool(9, heartbeatMessage_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and memoizes) the serialized byte size of this message.
     * Field numbers 1-9 encode to a single tag byte, hence the
     * {@code 1 * list.size()} tag-size terms for the repeated fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < cellsPerResult_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeUInt32SizeNoTag(cellsPerResult_.get(i));
        }
        size += dataSize;
        size += 1 * getCellsPerResultList().size();
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, scannerId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, moreResults_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(4, ttl_);
      }
      for (int i = 0; i < results_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, results_.get(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(6, stale_);
      }
      {
        // Non-packed repeated bool: 1 data byte + 1 tag byte per element.
        int dataSize = 0;
        dataSize = 1 * getPartialFlagPerResultList().size();
        size += dataSize;
        size += 1 * getPartialFlagPerResultList().size();
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(8, moreResultsInRegion_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(9, heartbeatMessage_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
18510 
    private static final long serialVersionUID = 0L;
    /** Java serialization is routed through the protobuf superclass proxy. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Field-by-field equality: for each optional field, presence must match
     * and, when present, the values must match; repeated fields and unknown
     * fields compare as lists/sets.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj;

      boolean result = true;
      result = result && getCellsPerResultList()
          .equals(other.getCellsPerResultList());
      result = result && (hasScannerId() == other.hasScannerId());
      if (hasScannerId()) {
        result = result && (getScannerId()
            == other.getScannerId());
      }
      result = result && (hasMoreResults() == other.hasMoreResults());
      if (hasMoreResults()) {
        result = result && (getMoreResults()
            == other.getMoreResults());
      }
      result = result && (hasTtl() == other.hasTtl());
      if (hasTtl()) {
        result = result && (getTtl()
            == other.getTtl());
      }
      result = result && getResultsList()
          .equals(other.getResultsList());
      result = result && (hasStale() == other.hasStale());
      if (hasStale()) {
        result = result && (getStale()
            == other.getStale());
      }
      result = result && getPartialFlagPerResultList()
          .equals(other.getPartialFlagPerResultList());
      result = result && (hasMoreResultsInRegion() == other.hasMoreResultsInRegion());
      if (hasMoreResultsInRegion()) {
        result = result && (getMoreResultsInRegion()
            == other.getMoreResultsInRegion());
      }
      result = result && (hasHeartbeatMessage() == other.hasHeartbeatMessage());
      if (hasHeartbeatMessage()) {
        result = result && (getHeartbeatMessage()
            == other.getHeartbeatMessage());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 until first computed.
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with {@link #equals}: mixes each present field keyed by
     * its field number, then memoizes the result.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getCellsPerResultCount() > 0) {
        hash = (37 * hash) + CELLS_PER_RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getCellsPerResultList().hashCode();
      }
      if (hasScannerId()) {
        hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getScannerId());
      }
      if (hasMoreResults()) {
        hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMoreResults());
      }
      if (hasTtl()) {
        hash = (37 * hash) + TTL_FIELD_NUMBER;
        hash = (53 * hash) + getTtl();
      }
      if (getResultsCount() > 0) {
        hash = (37 * hash) + RESULTS_FIELD_NUMBER;
        hash = (53 * hash) + getResultsList().hashCode();
      }
      if (hasStale()) {
        hash = (37 * hash) + STALE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getStale());
      }
      if (getPartialFlagPerResultCount() > 0) {
        hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getPartialFlagPerResultList().hashCode();
      }
      if (hasMoreResultsInRegion()) {
        hash = (37 * hash) + MORE_RESULTS_IN_REGION_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMoreResultsInRegion());
      }
      if (hasHeartbeatMessage()) {
        hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getHeartbeatMessage());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
18618 
    // Parse entry points: all overloads delegate to the message's PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    /** Creates an empty builder. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
18685     /**
18686      * Protobuf type {@code ScanResponse}
18687      *
18688      * <pre>
18689      **
18690      * The scan response. If there are no more results, more_results will
18691      * be false.  If it is not specified, it means there are more.
18692      * </pre>
18693      */
18694     public static final class Builder extends
18695         com.google.protobuf.GeneratedMessage.Builder<Builder>
18696        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      /** Eagerly creates the nested field builder for the repeated results field when required by the runtime. */
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getResultsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
18727 
      /**
       * Resets all fields to defaults and clears the builder-side presence
       * bits.  Note the builder's bit layout (0x01..0x100) differs from the
       * built message's layout, which only tracks the optional scalars.
       */
      public Builder clear() {
        super.clear();
        cellsPerResult_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        scannerId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        moreResults_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        ttl_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        if (resultsBuilder_ == null) {
          results_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
        } else {
          resultsBuilder_.clear();
        }
        stale_ = false;
        bitField0_ = (bitField0_ & ~0x00000020);
        partialFlagPerResult_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000040);
        moreResultsInRegion_ = false;
        bitField0_ = (bitField0_ & ~0x00000080);
        heartbeatMessage_ = false;
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
18754 
      /** Deep-copies this builder via an intermediate partial message. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if it is not initialized (ScanResponse
       * has no required fields, so this cannot actually throw here).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
18775 
      /**
       * Builds the message without the initialization check.  Repeated lists
       * are frozen via unmodifiableList and shared with the message; the
       * builder presence bits (0x02..0x100) are remapped to the message's
       * compact layout (0x01..0x20) in to_bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          // Freeze the list and drop the "mutable" bit so later builder
          // mutations copy-on-write instead of touching the built message.
          cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.cellsPerResult_ = cellsPerResult_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.scannerId_ = scannerId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000002;
        }
        result.moreResults_ = moreResults_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000004;
        }
        result.ttl_ = ttl_;
        if (resultsBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010)) {
            results_ = java.util.Collections.unmodifiableList(results_);
            bitField0_ = (bitField0_ & ~0x00000010);
          }
          result.results_ = results_;
        } else {
          result.results_ = resultsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000008;
        }
        result.stale_ = stale_;
        if (((bitField0_ & 0x00000040) == 0x00000040)) {
          partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
          bitField0_ = (bitField0_ & ~0x00000040);
        }
        result.partialFlagPerResult_ = partialFlagPerResult_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000010;
        }
        result.moreResultsInRegion_ = moreResultsInRegion_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000020;
        }
        result.heartbeatMessage_ = heartbeatMessage_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
18827 
18828       public Builder mergeFrom(com.google.protobuf.Message other) {
18829         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) {
18830           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other);
18831         } else {
18832           super.mergeFrom(other);
18833           return this;
18834         }
18835       }
18836 
      /**
       * Field-by-field merge of {@code other} into this builder: repeated
       * fields are concatenated (sharing other's immutable list when this
       * side is empty), optional fields overwrite only when set in other.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this;
        if (!other.cellsPerResult_.isEmpty()) {
          if (cellsPerResult_.isEmpty()) {
            // Adopt other's (immutable) list; clearing the bit forces a
            // copy-on-write before any local mutation.
            cellsPerResult_ = other.cellsPerResult_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureCellsPerResultIsMutable();
            cellsPerResult_.addAll(other.cellsPerResult_);
          }
          onChanged();
        }
        if (other.hasScannerId()) {
          setScannerId(other.getScannerId());
        }
        if (other.hasMoreResults()) {
          setMoreResults(other.getMoreResults());
        }
        if (other.hasTtl()) {
          setTtl(other.getTtl());
        }
        if (resultsBuilder_ == null) {
          if (!other.results_.isEmpty()) {
            if (results_.isEmpty()) {
              results_ = other.results_;
              bitField0_ = (bitField0_ & ~0x00000010);
            } else {
              ensureResultsIsMutable();
              results_.addAll(other.results_);
            }
            onChanged();
          }
        } else {
          if (!other.results_.isEmpty()) {
            if (resultsBuilder_.isEmpty()) {
              // Drop the empty nested builder and adopt other's list directly.
              resultsBuilder_.dispose();
              resultsBuilder_ = null;
              results_ = other.results_;
              bitField0_ = (bitField0_ & ~0x00000010);
              resultsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getResultsFieldBuilder() : null;
            } else {
              resultsBuilder_.addAllMessages(other.results_);
            }
          }
        }
        if (other.hasStale()) {
          setStale(other.getStale());
        }
        if (!other.partialFlagPerResult_.isEmpty()) {
          if (partialFlagPerResult_.isEmpty()) {
            partialFlagPerResult_ = other.partialFlagPerResult_;
            bitField0_ = (bitField0_ & ~0x00000040);
          } else {
            ensurePartialFlagPerResultIsMutable();
            partialFlagPerResult_.addAll(other.partialFlagPerResult_);
          }
          onChanged();
        }
        if (other.hasMoreResultsInRegion()) {
          setMoreResultsInRegion(other.getMoreResultsInRegion());
        }
        if (other.hasHeartbeatMessage()) {
          setHeartbeatMessage(other.getHeartbeatMessage());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
18906 
      /** Always true: ScanResponse declares no required fields. */
      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses a ScanResponse from {@code input} and merges it into this
       * builder.  On a parse error, whatever was successfully read before the
       * failure is still merged (via the finally block) before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence/mutability bits for this builder's fields; bit 0x00000001
      // marks cellsPerResult_ as a private mutable copy.
      private int bitField0_;

      // repeated uint32 cells_per_result = 1;
      private java.util.List<java.lang.Integer> cellsPerResult_ = java.util.Collections.emptyList();
      // Copy-on-write: replace the (possibly shared/immutable) list with a
      // private ArrayList before the first mutation.
      private void ensureCellsPerResultIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(cellsPerResult_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       *
       * <pre>
       * This field is filled in if we are doing cellblocks.  A cellblock is made up
       * of all Cells serialized out as one cellblock BUT responses from a server
       * have their Cells grouped by Result.  So we can reconstitute the
       * Results on the client-side, this field is a list of counts of Cells
       * in each Result that makes up the response.  For example, if this field
       * has 3, 3, 3 in it, then we know that on the client, we are to make
       * three Results each of three Cells each.
       * </pre>
       *
       * @return an unmodifiable view of the per-Result cell counts
       */
      public java.util.List<java.lang.Integer>
          getCellsPerResultList() {
        return java.util.Collections.unmodifiableList(cellsPerResult_);
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       * Number of entries (i.e. number of Results carried in the cellblock).
       */
      public int getCellsPerResultCount() {
        return cellsPerResult_.size();
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       * Cell count for the Result at {@code index}.
       */
      public int getCellsPerResult(int index) {
        return cellsPerResult_.get(index);
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       * Replaces the entry at {@code index}.
       */
      public Builder setCellsPerResult(
          int index, int value) {
        ensureCellsPerResultIsMutable();
        cellsPerResult_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       * Appends one entry.
       */
      public Builder addCellsPerResult(int value) {
        ensureCellsPerResultIsMutable();
        cellsPerResult_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       * Appends all given entries.
       */
      public Builder addAllCellsPerResult(
          java.lang.Iterable<? extends java.lang.Integer> values) {
        ensureCellsPerResultIsMutable();
        super.addAll(values, cellsPerResult_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated uint32 cells_per_result = 1;</code>
       * Resets the field to empty and clears its mutability bit.
       */
      public Builder clearCellsPerResult() {
        cellsPerResult_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
19065 
      // optional uint64 scanner_id = 2;  Presence tracked by bit 0x00000002.
      private long scannerId_ ;
      /**
       * <code>optional uint64 scanner_id = 2;</code>
       * @return whether scanner_id has been explicitly set
       */
      public boolean hasScannerId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 scanner_id = 2;</code>
       * Returns the value (0 if unset).
       */
      public long getScannerId() {
        return scannerId_;
      }
      /**
       * <code>optional uint64 scanner_id = 2;</code>
       * Sets the value and marks the field present.
       */
      public Builder setScannerId(long value) {
        bitField0_ |= 0x00000002;
        scannerId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 scanner_id = 2;</code>
       * Clears the value back to the proto default (0) and unsets presence.
       */
      public Builder clearScannerId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        scannerId_ = 0L;
        onChanged();
        return this;
      }
19098 
      // optional bool more_results = 3;  Presence tracked by bit 0x00000004.
      private boolean moreResults_ ;
      /**
       * <code>optional bool more_results = 3;</code>
       * @return whether more_results has been explicitly set
       */
      public boolean hasMoreResults() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool more_results = 3;</code>
       * Returns the value (false if unset).
       */
      public boolean getMoreResults() {
        return moreResults_;
      }
      /**
       * <code>optional bool more_results = 3;</code>
       * Sets the value and marks the field present.
       */
      public Builder setMoreResults(boolean value) {
        bitField0_ |= 0x00000004;
        moreResults_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool more_results = 3;</code>
       * Clears the value back to the proto default (false) and unsets presence.
       */
      public Builder clearMoreResults() {
        bitField0_ = (bitField0_ & ~0x00000004);
        moreResults_ = false;
        onChanged();
        return this;
      }
19131 
      // optional uint32 ttl = 4;  Presence tracked by bit 0x00000008.
      private int ttl_ ;
      /**
       * <code>optional uint32 ttl = 4;</code>
       * @return whether ttl has been explicitly set
       */
      public boolean hasTtl() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint32 ttl = 4;</code>
       * Returns the value (0 if unset).
       */
      public int getTtl() {
        return ttl_;
      }
      /**
       * <code>optional uint32 ttl = 4;</code>
       * Sets the value and marks the field present.
       */
      public Builder setTtl(int value) {
        bitField0_ |= 0x00000008;
        ttl_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 ttl = 4;</code>
       * Clears the value back to the proto default (0) and unsets presence.
       */
      public Builder clearTtl() {
        bitField0_ = (bitField0_ & ~0x00000008);
        ttl_ = 0;
        onChanged();
        return this;
      }
19164 
      // repeated .Result results = 5;
      // The builder keeps this field in one of two modes: a plain List
      // (results_, copy-on-write via bit 0x00000010) until a *Builder accessor
      // is first used, after which everything is delegated to resultsBuilder_.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replace the (possibly shared/immutable) list with a
      // private ArrayList before the first mutation.
      private void ensureResultsIsMutable() {
        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
          results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>(results_);
          bitField0_ |= 0x00000010;
         }
      }

      // Lazily created by getResultsFieldBuilder(); null while in list mode.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultsBuilder_;

      /**
       * <code>repeated .Result results = 5;</code>
       *
       * <pre>
       * If cells are not carried in an accompanying cellblock, then they are pb'd here.
       * This field is mutually exclusive with cells_per_result (since the Cells will
       * be inside the pb'd Result)
       * </pre>
       *
       * @return an unmodifiable view of the Results
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
        if (resultsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(results_);
        } else {
          return resultsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Number of Results.
       */
      public int getResultsCount() {
        if (resultsBuilder_ == null) {
          return results_.size();
        } else {
          return resultsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Returns the Result at {@code index}.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
        if (resultsBuilder_ == null) {
          return results_.get(index);
        } else {
          return resultsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Replaces the Result at {@code index}; rejects null.
       */
      public Builder setResults(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResultsIsMutable();
          results_.set(index, value);
          onChanged();
        } else {
          resultsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Replaces the Result at {@code index} with the built value.
       */
      public Builder setResults(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultsBuilder_ == null) {
          ensureResultsIsMutable();
          results_.set(index, builderForValue.build());
          onChanged();
        } else {
          resultsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Appends a Result; rejects null.
       */
      public Builder addResults(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResultsIsMutable();
          results_.add(value);
          onChanged();
        } else {
          resultsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Inserts a Result at {@code index}; rejects null.
       */
      public Builder addResults(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResultsIsMutable();
          results_.add(index, value);
          onChanged();
        } else {
          resultsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Appends the built value.
       */
      public Builder addResults(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultsBuilder_ == null) {
          ensureResultsIsMutable();
          results_.add(builderForValue.build());
          onChanged();
        } else {
          resultsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Inserts the built value at {@code index}.
       */
      public Builder addResults(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultsBuilder_ == null) {
          ensureResultsIsMutable();
          results_.add(index, builderForValue.build());
          onChanged();
        } else {
          resultsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Appends all given Results.
       */
      public Builder addAllResults(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> values) {
        if (resultsBuilder_ == null) {
          ensureResultsIsMutable();
          super.addAll(values, results_);
          onChanged();
        } else {
          resultsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Resets the field to empty.
       */
      public Builder clearResults() {
        if (resultsBuilder_ == null) {
          results_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000010);
          onChanged();
        } else {
          resultsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Removes the Result at {@code index}.
       */
      public Builder removeResults(int index) {
        if (resultsBuilder_ == null) {
          ensureResultsIsMutable();
          results_.remove(index);
          onChanged();
        } else {
          resultsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Returns a mutable sub-builder for the Result at {@code index}
       * (switches this field into builder mode).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultsBuilder(
          int index) {
        return getResultsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Read-only view of the entry at {@code index} without forcing builder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
          int index) {
        if (resultsBuilder_ == null) {
          return results_.get(index);  } else {
          return resultsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Read-only view of all entries without forcing builder mode.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
           getResultsOrBuilderList() {
        if (resultsBuilder_ != null) {
          return resultsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(results_);
        }
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Appends a default-initialized Result and returns its sub-builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder() {
        return getResultsFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Inserts a default-initialized Result at {@code index} and returns its sub-builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder(
          int index) {
        return getResultsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
      }
      /**
       * <code>repeated .Result results = 5;</code>
       * Sub-builders for all entries (forces builder mode).
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder> 
           getResultsBuilderList() {
        return getResultsFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder, handing it the current list
      // contents, and permanently switches this field into builder mode
      // (results_ is nulled out; all access goes through resultsBuilder_).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
          getResultsFieldBuilder() {
        if (resultsBuilder_ == null) {
          resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
                  results_,
                  ((bitField0_ & 0x00000010) == 0x00000010),
                  getParentForChildren(),
                  isClean());
          results_ = null;
        }
        return resultsBuilder_;
      }
19512 
      // optional bool stale = 6;  Presence tracked by bit 0x00000020.
      private boolean stale_ ;
      /**
       * <code>optional bool stale = 6;</code>
       * @return whether stale has been explicitly set
       */
      public boolean hasStale() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional bool stale = 6;</code>
       * Returns the value (false if unset).
       */
      public boolean getStale() {
        return stale_;
      }
      /**
       * <code>optional bool stale = 6;</code>
       * Sets the value and marks the field present.
       */
      public Builder setStale(boolean value) {
        bitField0_ |= 0x00000020;
        stale_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool stale = 6;</code>
       * Clears the value back to the proto default (false) and unsets presence.
       */
      public Builder clearStale() {
        bitField0_ = (bitField0_ & ~0x00000020);
        stale_ = false;
        onChanged();
        return this;
      }
19545 
19546       // repeated bool partial_flag_per_result = 7;
19547       private java.util.List<java.lang.Boolean> partialFlagPerResult_ = java.util.Collections.emptyList();
19548       private void ensurePartialFlagPerResultIsMutable() {
19549         if (!((bitField0_ & 0x00000040) == 0x00000040)) {
19550           partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(partialFlagPerResult_);
19551           bitField0_ |= 0x00000040;
19552          }
19553       }
19554       /**
19555        * <code>repeated bool partial_flag_per_result = 7;</code>
19556        *
19557        * <pre>
19558        * This field is filled in if we are doing cellblocks. In the event that a row
19559        * could not fit all of its cells into a single RPC chunk, the results will be
19560        * returned as partials, and reconstructed into a complete result on the client
19561        * side. This field is a list of flags indicating whether or not the result
19562        * that the cells belong to is a partial result. For example, if this field
19563        * has false, false, true in it, then we know that on the client side, we need to
19564        * make another RPC request since the last result was only a partial.
19565        * </pre>
19566        */
19567       public java.util.List<java.lang.Boolean>
19568           getPartialFlagPerResultList() {
19569         return java.util.Collections.unmodifiableList(partialFlagPerResult_);
19570       }
19571       /**
19572        * <code>repeated bool partial_flag_per_result = 7;</code>
19573        *
19574        * <pre>
19575        * This field is filled in if we are doing cellblocks. In the event that a row
19576        * could not fit all of its cells into a single RPC chunk, the results will be
19577        * returned as partials, and reconstructed into a complete result on the client
19578        * side. This field is a list of flags indicating whether or not the result
19579        * that the cells belong to is a partial result. For example, if this field
19580        * has false, false, true in it, then we know that on the client side, we need to
19581        * make another RPC request since the last result was only a partial.
19582        * </pre>
19583        */
19584       public int getPartialFlagPerResultCount() {
19585         return partialFlagPerResult_.size();
19586       }
19587       /**
19588        * <code>repeated bool partial_flag_per_result = 7;</code>
19589        *
19590        * <pre>
19591        * This field is filled in if we are doing cellblocks. In the event that a row
19592        * could not fit all of its cells into a single RPC chunk, the results will be
19593        * returned as partials, and reconstructed into a complete result on the client
19594        * side. This field is a list of flags indicating whether or not the result
19595        * that the cells belong to is a partial result. For example, if this field
19596        * has false, false, true in it, then we know that on the client side, we need to
19597        * make another RPC request since the last result was only a partial.
19598        * </pre>
19599        */
19600       public boolean getPartialFlagPerResult(int index) {
19601         return partialFlagPerResult_.get(index);
19602       }
19603       /**
19604        * <code>repeated bool partial_flag_per_result = 7;</code>
19605        *
19606        * <pre>
19607        * This field is filled in if we are doing cellblocks. In the event that a row
19608        * could not fit all of its cells into a single RPC chunk, the results will be
19609        * returned as partials, and reconstructed into a complete result on the client
19610        * side. This field is a list of flags indicating whether or not the result
19611        * that the cells belong to is a partial result. For example, if this field
19612        * has false, false, true in it, then we know that on the client side, we need to
19613        * make another RPC request since the last result was only a partial.
19614        * </pre>
19615        */
19616       public Builder setPartialFlagPerResult(
19617           int index, boolean value) {
19618         ensurePartialFlagPerResultIsMutable();
19619         partialFlagPerResult_.set(index, value);
19620         onChanged();
19621         return this;
19622       }
19623       /**
19624        * <code>repeated bool partial_flag_per_result = 7;</code>
19625        *
19626        * <pre>
19627        * This field is filled in if we are doing cellblocks. In the event that a row
19628        * could not fit all of its cells into a single RPC chunk, the results will be
19629        * returned as partials, and reconstructed into a complete result on the client
19630        * side. This field is a list of flags indicating whether or not the result
19631        * that the cells belong to is a partial result. For example, if this field
19632        * has false, false, true in it, then we know that on the client side, we need to
19633        * make another RPC request since the last result was only a partial.
19634        * </pre>
19635        */
19636       public Builder addPartialFlagPerResult(boolean value) {
19637         ensurePartialFlagPerResultIsMutable();
19638         partialFlagPerResult_.add(value);
19639         onChanged();
19640         return this;
19641       }
19642       /**
19643        * <code>repeated bool partial_flag_per_result = 7;</code>
19644        *
19645        * <pre>
19646        * This field is filled in if we are doing cellblocks. In the event that a row
19647        * could not fit all of its cells into a single RPC chunk, the results will be
19648        * returned as partials, and reconstructed into a complete result on the client
19649        * side. This field is a list of flags indicating whether or not the result
19650        * that the cells belong to is a partial result. For example, if this field
19651        * has false, false, true in it, then we know that on the client side, we need to
19652        * make another RPC request since the last result was only a partial.
19653        * </pre>
19654        */
19655       public Builder addAllPartialFlagPerResult(
19656           java.lang.Iterable<? extends java.lang.Boolean> values) {
19657         ensurePartialFlagPerResultIsMutable();
19658         super.addAll(values, partialFlagPerResult_);
19659         onChanged();
19660         return this;
19661       }
19662       /**
19663        * <code>repeated bool partial_flag_per_result = 7;</code>
19664        *
19665        * <pre>
19666        * This field is filled in if we are doing cellblocks. In the event that a row
19667        * could not fit all of its cells into a single RPC chunk, the results will be
19668        * returned as partials, and reconstructed into a complete result on the client
19669        * side. This field is a list of flags indicating whether or not the result
19670        * that the cells belong to is a partial result. For example, if this field
19671        * has false, false, true in it, then we know that on the client side, we need to
19672        * make another RPC request since the last result was only a partial.
19673        * </pre>
19674        */
19675       public Builder clearPartialFlagPerResult() {
19676         partialFlagPerResult_ = java.util.Collections.emptyList();
19677         bitField0_ = (bitField0_ & ~0x00000040);
19678         onChanged();
19679         return this;
19680       }
19681 
      // optional bool more_results_in_region = 8;
      private boolean moreResultsInRegion_ ;
      /**
       * <code>optional bool more_results_in_region = 8;</code>
       *
       * <pre>
       * A server may choose to limit the number of results returned to the client for
       * reasons such as the size in bytes or quantity of results accumulated. This field
       * will true when more results exist in the current region.
       * </pre>
       */
      public boolean hasMoreResultsInRegion() {
        return ((bitField0_ & 0x00000080) == 0x00000080);  // presence bit for field 8
      }
      /**
       * <code>optional bool more_results_in_region = 8;</code>
       *
       * <pre>
       * A server may choose to limit the number of results returned to the client for
       * reasons such as the size in bytes or quantity of results accumulated. This field
       * will true when more results exist in the current region.
       * </pre>
       */
      public boolean getMoreResultsInRegion() {
        return moreResultsInRegion_;
      }
      /**
       * <code>optional bool more_results_in_region = 8;</code>
       *
       * <pre>
       * A server may choose to limit the number of results returned to the client for
       * reasons such as the size in bytes or quantity of results accumulated. This field
       * will true when more results exist in the current region.
       * </pre>
       */
      public Builder setMoreResultsInRegion(boolean value) {
        bitField0_ |= 0x00000080;  // mark field 8 present
        moreResultsInRegion_ = value;
        onChanged();  // GeneratedMessage.Builder change notification
        return this;
      }
      /**
       * <code>optional bool more_results_in_region = 8;</code>
       *
       * <pre>
       * A server may choose to limit the number of results returned to the client for
       * reasons such as the size in bytes or quantity of results accumulated. This field
       * will true when more results exist in the current region.
       * </pre>
       */
      public Builder clearMoreResultsInRegion() {
        bitField0_ = (bitField0_ & ~0x00000080);
        moreResultsInRegion_ = false;  // back to the proto2 bool default
        onChanged();
        return this;
      }
19738 
      // optional bool heartbeat_message = 9;
      private boolean heartbeatMessage_ ;
      /**
       * <code>optional bool heartbeat_message = 9;</code>
       *
       * <pre>
       * This field is filled in if the server is sending back a heartbeat message.
       * Heartbeat messages are sent back to the client to prevent the scanner from
       * timing out. Seeing a heartbeat message communicates to the Client that the
       * server would have continued to scan had the time limit not been reached.
       * </pre>
       */
      public boolean hasHeartbeatMessage() {
        return ((bitField0_ & 0x00000100) == 0x00000100);  // presence bit for field 9
      }
      /**
       * <code>optional bool heartbeat_message = 9;</code>
       *
       * <pre>
       * This field is filled in if the server is sending back a heartbeat message.
       * Heartbeat messages are sent back to the client to prevent the scanner from
       * timing out. Seeing a heartbeat message communicates to the Client that the
       * server would have continued to scan had the time limit not been reached.
       * </pre>
       */
      public boolean getHeartbeatMessage() {
        return heartbeatMessage_;
      }
      /**
       * <code>optional bool heartbeat_message = 9;</code>
       *
       * <pre>
       * This field is filled in if the server is sending back a heartbeat message.
       * Heartbeat messages are sent back to the client to prevent the scanner from
       * timing out. Seeing a heartbeat message communicates to the Client that the
       * server would have continued to scan had the time limit not been reached.
       * </pre>
       */
      public Builder setHeartbeatMessage(boolean value) {
        bitField0_ |= 0x00000100;  // mark field 9 present
        heartbeatMessage_ = value;
        onChanged();  // GeneratedMessage.Builder change notification
        return this;
      }
      /**
       * <code>optional bool heartbeat_message = 9;</code>
       *
       * <pre>
       * This field is filled in if the server is sending back a heartbeat message.
       * Heartbeat messages are sent back to the client to prevent the scanner from
       * timing out. Seeing a heartbeat message communicates to the Client that the
       * server would have continued to scan had the time limit not been reached.
       * </pre>
       */
      public Builder clearHeartbeatMessage() {
        bitField0_ = (bitField0_ & ~0x00000100);
        heartbeatMessage_ = false;  // back to the proto2 bool default
        onChanged();
        return this;
      }
19799 
19800       // @@protoc_insertion_point(builder_scope:ScanResponse)
19801     }
19802 
    // Eagerly builds the shared default instance when ScanResponse is class-loaded.
    static {
      defaultInstance = new ScanResponse(true);
      defaultInstance.initFields();
    }
19807 
19808     // @@protoc_insertion_point(class_scope:ScanResponse)
19809   }
19810 
  // NOTE(review): protoc-generated read-accessor contract shared by
  // BulkLoadHFileRequest and its Builder. Do not hand-edit — the file header
  // says this code is generated from Client.proto; regenerate instead.
  public interface BulkLoadHFileRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;
    /**
     * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>
        getFamilyPathList();
    /**
     * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
    /**
     * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
     */
    int getFamilyPathCount();
    /**
     * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
        getFamilyPathOrBuilderList();
    /**
     * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
        int index);

    // optional bool assign_seq_num = 3;
    /**
     * <code>optional bool assign_seq_num = 3;</code>
     */
    boolean hasAssignSeqNum();
    /**
     * <code>optional bool assign_seq_num = 3;</code>
     */
    boolean getAssignSeqNum();
  }
19863   /**
19864    * Protobuf type {@code BulkLoadHFileRequest}
19865    *
19866    * <pre>
19867    **
19868    * Atomically bulk load multiple HFiles (say from different column families)
19869    * into an open region.
19870    * </pre>
19871    */
19872   public static final class BulkLoadHFileRequest extends
19873       com.google.protobuf.GeneratedMessage
19874       implements BulkLoadHFileRequestOrBuilder {
    // Use BulkLoadHFileRequest.newBuilder() to construct.
    private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only for the shared defaultInstance (see static block).
    private BulkLoadHFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final BulkLoadHFileRequest defaultInstance;
    public static BulkLoadHFileRequest getDefaultInstance() {
      return defaultInstance;
    }

    public BulkLoadHFileRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unknown fields captured during parsing are preserved for reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor; invoked only through PARSER.parsePartialFrom.
    // NOTE(review): protoc-generated (protobuf 2.5 era) — regenerate from
    // Client.proto rather than hand-editing (see file header).
    private BulkLoadHFileRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // The default label lexically precedes the field cases below; Java switch
          // dispatch is by value, not label order, so field tags still match.
          switch (tag) {
            case 0:  // readTag() returns 0 at end of input
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;  // false from parseUnknownField ends this message
              }
              break;
            }
            case 10: {  // field 1 (region), length-delimited message
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field already seen: proto2 semantics merge repeated occurrences.
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {  // field 2 (family_path), repeated message
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                // Allocate the list lazily, on the first occurrence only.
                familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
                mutable_bitField0_ |= 0x00000002;
              }
              familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
              break;
            }
            case 24: {  // field 3 (assign_seq_num), varint bool
              bitField0_ |= 0x00000002;
              assignSeqNum_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and unknown fields even on failure paths,
        // so the partially-built message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / reflection plumbing generated from Client.proto.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
    }

    // Shared parser instance; delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<BulkLoadHFileRequest> PARSER =
        new com.google.protobuf.AbstractParser<BulkLoadHFileRequest>() {
      public BulkLoadHFileRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new BulkLoadHFileRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<BulkLoadHFileRequest> getParserForType() {
      return PARSER;
    }
19987 
    // NOTE(review): protoc-generated read-accessor contract for the nested
    // FamilyPath message; regenerate from Client.proto instead of hand-editing.
    public interface FamilyPathOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required bytes family = 1;
      /**
       * <code>required bytes family = 1;</code>
       */
      boolean hasFamily();
      /**
       * <code>required bytes family = 1;</code>
       */
      com.google.protobuf.ByteString getFamily();

      // required string path = 2;
      /**
       * <code>required string path = 2;</code>
       */
      boolean hasPath();
      /**
       * <code>required string path = 2;</code>
       */
      java.lang.String getPath();
      /**
       * <code>required string path = 2;</code>
       */
      com.google.protobuf.ByteString
          getPathBytes();
    }
20016     /**
20017      * Protobuf type {@code BulkLoadHFileRequest.FamilyPath}
20018      */
20019     public static final class FamilyPath extends
20020         com.google.protobuf.GeneratedMessage
20021         implements FamilyPathOrBuilder {
      // Use FamilyPath.newBuilder() to construct.
      private FamilyPath(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      // noInit ctor: used only for the shared defaultInstance.
      private FamilyPath(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final FamilyPath defaultInstance;
      public static FamilyPath getDefaultInstance() {
        return defaultInstance;
      }

      public FamilyPath getDefaultInstanceForType() {
        return defaultInstance;
      }

      // Unknown fields captured during parsing are preserved for reserialization.
      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
      // Wire-format parsing constructor; invoked only through PARSER.parsePartialFrom.
      // NOTE(review): protoc-generated — regenerate from Client.proto, do not hand-edit.
      private FamilyPath(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        initFields();
        int mutable_bitField0_ = 0;
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder();
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            // The default label precedes the field cases; Java switch dispatch is
            // by value, not label order, so field tags still match their cases.
            switch (tag) {
              case 0:  // readTag() returns 0 at end of input
                done = true;
                break;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  done = true;  // false from parseUnknownField ends this message
                }
                break;
              }
              case 10: {  // field 1 (family), length-delimited bytes
                bitField0_ |= 0x00000001;
                family_ = input.readBytes();
                break;
              }
              case 18: {  // field 2 (path); stored as ByteString, decoded lazily
                bitField0_ |= 0x00000002;
                path_ = input.readBytes();
                break;
              }
            }
          }
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(this);
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(
              e.getMessage()).setUnfinishedMessage(this);
        } finally {
          // Always freeze unknown fields, even when parsing throws.
          this.unknownFields = unknownFields.build();
          makeExtensionsImmutable();
        }
      }
      // Descriptor / reflection plumbing generated from Client.proto.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
      }

      // Shared parser instance; delegates to the wire-format parsing constructor.
      public static com.google.protobuf.Parser<FamilyPath> PARSER =
          new com.google.protobuf.AbstractParser<FamilyPath>() {
        public FamilyPath parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new FamilyPath(input, extensionRegistry);
        }
      };

      @java.lang.Override
      public com.google.protobuf.Parser<FamilyPath> getParserForType() {
        return PARSER;
      }
20115 
      private int bitField0_;  // presence bits: 0x1 = family, 0x2 = path
      // required bytes family = 1;
      public static final int FAMILY_FIELD_NUMBER = 1;
      private com.google.protobuf.ByteString family_;
      /**
       * <code>required bytes family = 1;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }
20132 
      // required string path = 2;
      public static final int PATH_FIELD_NUMBER = 2;
      // Holds either a ByteString (as parsed off the wire) or a String (once
      // decoded); the accessors below convert and cache in each direction.
      private java.lang.Object path_;
      /**
       * <code>required string path = 2;</code>
       */
      public boolean hasPath() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required string path = 2;</code>
       */
      public java.lang.String getPath() {
        java.lang.Object ref = path_;
        if (ref instanceof java.lang.String) {
          return (java.lang.String) ref;
        } else {
          com.google.protobuf.ByteString bs = 
              (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          // Cache the decoded form only when the bytes are valid UTF-8, so a
          // later reserialization of malformed input keeps the original bytes.
          if (bs.isValidUtf8()) {
            path_ = s;
          }
          return s;
        }
      }
      /**
       * <code>required string path = 2;</code>
       */
      public com.google.protobuf.ByteString
          getPathBytes() {
        java.lang.Object ref = path_;
        if (ref instanceof java.lang.String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          path_ = b;  // cache the encoded form
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
20175 
      // Sets both fields to their proto2 defaults.
      private void initFields() {
        family_ = com.google.protobuf.ByteString.EMPTY;
        path_ = "";
      }
      // Memoized required-field check: -1 = unknown, 0 = missing, 1 = complete.
      private byte memoizedIsInitialized = -1;
      public final boolean isInitialized() {
        byte isInitialized = memoizedIsInitialized;
        if (isInitialized != -1) return isInitialized == 1;

        // Both fields are `required` in Client.proto.
        if (!hasFamily()) {
          memoizedIsInitialized = 0;
          return false;
        }
        if (!hasPath()) {
          memoizedIsInitialized = 0;
          return false;
        }
        memoizedIsInitialized = 1;
        return true;
      }
20196 
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // ensure memoized sizes are computed before writing
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          output.writeBytes(1, family_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          output.writeBytes(2, getPathBytes());
        }
        getUnknownFields().writeTo(output);  // round-trip unknown fields
      }

      // Memoized wire size: -1 until first computed.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, family_);
        }
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getPathBytes());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }
20227 
      private static final long serialVersionUID = 0L;
      // Java serialization hook; defers to GeneratedMessage's serialized proxy.
      @java.lang.Override
      protected java.lang.Object writeReplace()
          throws java.io.ObjectStreamException {
        return super.writeReplace();
      }
20234 
      // Value equality over presence bits, both fields, and unknown fields.
      @java.lang.Override
      public boolean equals(final java.lang.Object obj) {
        if (obj == this) {
         return true;
        }
        if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) {
          return super.equals(obj);
        }
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj;

        boolean result = true;
        result = result && (hasFamily() == other.hasFamily());
        if (hasFamily()) {
          result = result && getFamily()
              .equals(other.getFamily());
        }
        result = result && (hasPath() == other.hasPath());
        if (hasPath()) {
          result = result && getPath()
              .equals(other.getPath());
        }
        result = result &&
            getUnknownFields().equals(other.getUnknownFields());
        return result;
      }

      // Memoized hash (0 = not yet computed); consistent with equals above.
      private int memoizedHashCode = 0;
      @java.lang.Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (19 * hash) + getDescriptorForType().hashCode();
        if (hasFamily()) {
          hash = (37 * hash) + FAMILY_FIELD_NUMBER;
          hash = (53 * hash) + getFamily().hashCode();
        }
        if (hasPath()) {
          hash = (37 * hash) + PATH_FIELD_NUMBER;
          hash = (53 * hash) + getPath().hashCode();
        }
        hash = (29 * hash) + getUnknownFields().hashCode();
        memoizedHashCode = hash;
        return hash;
      }
20281 
      // Standard protoc-generated parse entry points; all delegate to PARSER.
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
20334 
      // Builder factory methods generated by protoc.
      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      // Returns a fresh builder pre-populated with the prototype's fields.
      public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
20348       /**
20349        * Protobuf type {@code BulkLoadHFileRequest.FamilyPath}
20350        */
20351       public static final class Builder extends
20352           com.google.protobuf.GeneratedMessage.Builder<Builder>
20353          implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder {
20354         public static final com.google.protobuf.Descriptors.Descriptor
20355             getDescriptor() {
20356           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
20357         }
20358 
20359         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20360             internalGetFieldAccessorTable() {
20361           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
20362               .ensureFieldAccessorsInitialized(
20363                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
20364         }
20365 
20366         // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
20367         private Builder() {
20368           maybeForceBuilderInitialization();
20369         }
20370 
20371         private Builder(
20372             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20373           super(parent);
20374           maybeForceBuilderInitialization();
20375         }
20376         private void maybeForceBuilderInitialization() {
20377           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
20378           }
20379         }
20380         private static Builder create() {
20381           return new Builder();
20382         }
20383 
20384         public Builder clear() {
20385           super.clear();
20386           family_ = com.google.protobuf.ByteString.EMPTY;
20387           bitField0_ = (bitField0_ & ~0x00000001);
20388           path_ = "";
20389           bitField0_ = (bitField0_ & ~0x00000002);
20390           return this;
20391         }
20392 
20393         public Builder clone() {
20394           return create().mergeFrom(buildPartial());
20395         }
20396 
20397         public com.google.protobuf.Descriptors.Descriptor
20398             getDescriptorForType() {
20399           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
20400         }
20401 
20402         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() {
20403           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance();
20404         }
20405 
20406         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() {
20407           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial();
20408           if (!result.isInitialized()) {
20409             throw newUninitializedMessageException(result);
20410           }
20411           return result;
20412         }
20413 
20414         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() {
20415           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this);
20416           int from_bitField0_ = bitField0_;
20417           int to_bitField0_ = 0;
20418           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
20419             to_bitField0_ |= 0x00000001;
20420           }
20421           result.family_ = family_;
20422           if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
20423             to_bitField0_ |= 0x00000002;
20424           }
20425           result.path_ = path_;
20426           result.bitField0_ = to_bitField0_;
20427           onBuilt();
20428           return result;
20429         }
20430 
20431         public Builder mergeFrom(com.google.protobuf.Message other) {
20432           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) {
20433             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other);
20434           } else {
20435             super.mergeFrom(other);
20436             return this;
20437           }
20438         }
20439 
20440         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) {
20441           if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this;
20442           if (other.hasFamily()) {
20443             setFamily(other.getFamily());
20444           }
20445           if (other.hasPath()) {
20446             bitField0_ |= 0x00000002;
20447             path_ = other.path_;
20448             onChanged();
20449           }
20450           this.mergeUnknownFields(other.getUnknownFields());
20451           return this;
20452         }
20453 
20454         public final boolean isInitialized() {
20455           if (!hasFamily()) {
20456             
20457             return false;
20458           }
20459           if (!hasPath()) {
20460             
20461             return false;
20462           }
20463           return true;
20464         }
20465 
20466         public Builder mergeFrom(
20467             com.google.protobuf.CodedInputStream input,
20468             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20469             throws java.io.IOException {
20470           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parsedMessage = null;
20471           try {
20472             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
20473           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20474             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage();
20475             throw e;
20476           } finally {
20477             if (parsedMessage != null) {
20478               mergeFrom(parsedMessage);
20479             }
20480           }
20481           return this;
20482         }
20483         private int bitField0_;
20484 
20485         // required bytes family = 1;
20486         private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
20487         /**
20488          * <code>required bytes family = 1;</code>
20489          */
20490         public boolean hasFamily() {
20491           return ((bitField0_ & 0x00000001) == 0x00000001);
20492         }
20493         /**
20494          * <code>required bytes family = 1;</code>
20495          */
20496         public com.google.protobuf.ByteString getFamily() {
20497           return family_;
20498         }
20499         /**
20500          * <code>required bytes family = 1;</code>
20501          */
20502         public Builder setFamily(com.google.protobuf.ByteString value) {
20503           if (value == null) {
20504     throw new NullPointerException();
20505   }
20506   bitField0_ |= 0x00000001;
20507           family_ = value;
20508           onChanged();
20509           return this;
20510         }
20511         /**
20512          * <code>required bytes family = 1;</code>
20513          */
20514         public Builder clearFamily() {
20515           bitField0_ = (bitField0_ & ~0x00000001);
20516           family_ = getDefaultInstance().getFamily();
20517           onChanged();
20518           return this;
20519         }
20520 
20521         // required string path = 2;
20522         private java.lang.Object path_ = "";
20523         /**
20524          * <code>required string path = 2;</code>
20525          */
20526         public boolean hasPath() {
20527           return ((bitField0_ & 0x00000002) == 0x00000002);
20528         }
20529         /**
20530          * <code>required string path = 2;</code>
20531          */
20532         public java.lang.String getPath() {
20533           java.lang.Object ref = path_;
20534           if (!(ref instanceof java.lang.String)) {
20535             java.lang.String s = ((com.google.protobuf.ByteString) ref)
20536                 .toStringUtf8();
20537             path_ = s;
20538             return s;
20539           } else {
20540             return (java.lang.String) ref;
20541           }
20542         }
20543         /**
20544          * <code>required string path = 2;</code>
20545          */
20546         public com.google.protobuf.ByteString
20547             getPathBytes() {
20548           java.lang.Object ref = path_;
20549           if (ref instanceof String) {
20550             com.google.protobuf.ByteString b = 
20551                 com.google.protobuf.ByteString.copyFromUtf8(
20552                     (java.lang.String) ref);
20553             path_ = b;
20554             return b;
20555           } else {
20556             return (com.google.protobuf.ByteString) ref;
20557           }
20558         }
20559         /**
20560          * <code>required string path = 2;</code>
20561          */
20562         public Builder setPath(
20563             java.lang.String value) {
20564           if (value == null) {
20565     throw new NullPointerException();
20566   }
20567   bitField0_ |= 0x00000002;
20568           path_ = value;
20569           onChanged();
20570           return this;
20571         }
20572         /**
20573          * <code>required string path = 2;</code>
20574          */
20575         public Builder clearPath() {
20576           bitField0_ = (bitField0_ & ~0x00000002);
20577           path_ = getDefaultInstance().getPath();
20578           onChanged();
20579           return this;
20580         }
20581         /**
20582          * <code>required string path = 2;</code>
20583          */
20584         public Builder setPathBytes(
20585             com.google.protobuf.ByteString value) {
20586           if (value == null) {
20587     throw new NullPointerException();
20588   }
20589   bitField0_ |= 0x00000002;
20590           path_ = value;
20591           onChanged();
20592           return this;
20593         }
20594 
20595         // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath)
20596       }
20597 
20598       static {
20599         defaultInstance = new FamilyPath(true);
20600         defaultInstance.initFields();
20601       }
20602 
20603       // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath)
20604     }
20605 
20606     private int bitField0_;
20607     // required .RegionSpecifier region = 1;
20608     public static final int REGION_FIELD_NUMBER = 1;
20609     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
20610     /**
20611      * <code>required .RegionSpecifier region = 1;</code>
20612      */
20613     public boolean hasRegion() {
20614       return ((bitField0_ & 0x00000001) == 0x00000001);
20615     }
20616     /**
20617      * <code>required .RegionSpecifier region = 1;</code>
20618      */
20619     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
20620       return region_;
20621     }
20622     /**
20623      * <code>required .RegionSpecifier region = 1;</code>
20624      */
20625     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
20626       return region_;
20627     }
20628 
20629     // repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;
20630     public static final int FAMILY_PATH_FIELD_NUMBER = 2;
20631     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
20632     /**
20633      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20634      */
20635     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
20636       return familyPath_;
20637     }
20638     /**
20639      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20640      */
20641     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
20642         getFamilyPathOrBuilderList() {
20643       return familyPath_;
20644     }
20645     /**
20646      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20647      */
20648     public int getFamilyPathCount() {
20649       return familyPath_.size();
20650     }
20651     /**
20652      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20653      */
20654     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
20655       return familyPath_.get(index);
20656     }
20657     /**
20658      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
20659      */
20660     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
20661         int index) {
20662       return familyPath_.get(index);
20663     }
20664 
20665     // optional bool assign_seq_num = 3;
20666     public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 3;
20667     private boolean assignSeqNum_;
20668     /**
20669      * <code>optional bool assign_seq_num = 3;</code>
20670      */
20671     public boolean hasAssignSeqNum() {
20672       return ((bitField0_ & 0x00000002) == 0x00000002);
20673     }
20674     /**
20675      * <code>optional bool assign_seq_num = 3;</code>
20676      */
20677     public boolean getAssignSeqNum() {
20678       return assignSeqNum_;
20679     }
20680 
20681     private void initFields() {
20682       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
20683       familyPath_ = java.util.Collections.emptyList();
20684       assignSeqNum_ = false;
20685     }
20686     private byte memoizedIsInitialized = -1;
20687     public final boolean isInitialized() {
20688       byte isInitialized = memoizedIsInitialized;
20689       if (isInitialized != -1) return isInitialized == 1;
20690 
20691       if (!hasRegion()) {
20692         memoizedIsInitialized = 0;
20693         return false;
20694       }
20695       if (!getRegion().isInitialized()) {
20696         memoizedIsInitialized = 0;
20697         return false;
20698       }
20699       for (int i = 0; i < getFamilyPathCount(); i++) {
20700         if (!getFamilyPath(i).isInitialized()) {
20701           memoizedIsInitialized = 0;
20702           return false;
20703         }
20704       }
20705       memoizedIsInitialized = 1;
20706       return true;
20707     }
20708 
20709     public void writeTo(com.google.protobuf.CodedOutputStream output)
20710                         throws java.io.IOException {
20711       getSerializedSize();
20712       if (((bitField0_ & 0x00000001) == 0x00000001)) {
20713         output.writeMessage(1, region_);
20714       }
20715       for (int i = 0; i < familyPath_.size(); i++) {
20716         output.writeMessage(2, familyPath_.get(i));
20717       }
20718       if (((bitField0_ & 0x00000002) == 0x00000002)) {
20719         output.writeBool(3, assignSeqNum_);
20720       }
20721       getUnknownFields().writeTo(output);
20722     }
20723 
20724     private int memoizedSerializedSize = -1;
20725     public int getSerializedSize() {
20726       int size = memoizedSerializedSize;
20727       if (size != -1) return size;
20728 
20729       size = 0;
20730       if (((bitField0_ & 0x00000001) == 0x00000001)) {
20731         size += com.google.protobuf.CodedOutputStream
20732           .computeMessageSize(1, region_);
20733       }
20734       for (int i = 0; i < familyPath_.size(); i++) {
20735         size += com.google.protobuf.CodedOutputStream
20736           .computeMessageSize(2, familyPath_.get(i));
20737       }
20738       if (((bitField0_ & 0x00000002) == 0x00000002)) {
20739         size += com.google.protobuf.CodedOutputStream
20740           .computeBoolSize(3, assignSeqNum_);
20741       }
20742       size += getUnknownFields().getSerializedSize();
20743       memoizedSerializedSize = size;
20744       return size;
20745     }
20746 
20747     private static final long serialVersionUID = 0L;
20748     @java.lang.Override
20749     protected java.lang.Object writeReplace()
20750         throws java.io.ObjectStreamException {
20751       return super.writeReplace();
20752     }
20753 
20754     @java.lang.Override
20755     public boolean equals(final java.lang.Object obj) {
20756       if (obj == this) {
20757        return true;
20758       }
20759       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) {
20760         return super.equals(obj);
20761       }
20762       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj;
20763 
20764       boolean result = true;
20765       result = result && (hasRegion() == other.hasRegion());
20766       if (hasRegion()) {
20767         result = result && getRegion()
20768             .equals(other.getRegion());
20769       }
20770       result = result && getFamilyPathList()
20771           .equals(other.getFamilyPathList());
20772       result = result && (hasAssignSeqNum() == other.hasAssignSeqNum());
20773       if (hasAssignSeqNum()) {
20774         result = result && (getAssignSeqNum()
20775             == other.getAssignSeqNum());
20776       }
20777       result = result &&
20778           getUnknownFields().equals(other.getUnknownFields());
20779       return result;
20780     }
20781 
20782     private int memoizedHashCode = 0;
20783     @java.lang.Override
20784     public int hashCode() {
20785       if (memoizedHashCode != 0) {
20786         return memoizedHashCode;
20787       }
20788       int hash = 41;
20789       hash = (19 * hash) + getDescriptorForType().hashCode();
20790       if (hasRegion()) {
20791         hash = (37 * hash) + REGION_FIELD_NUMBER;
20792         hash = (53 * hash) + getRegion().hashCode();
20793       }
20794       if (getFamilyPathCount() > 0) {
20795         hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER;
20796         hash = (53 * hash) + getFamilyPathList().hashCode();
20797       }
20798       if (hasAssignSeqNum()) {
20799         hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER;
20800         hash = (53 * hash) + hashBoolean(getAssignSeqNum());
20801       }
20802       hash = (29 * hash) + getUnknownFields().hashCode();
20803       memoizedHashCode = hash;
20804       return hash;
20805     }
20806 
20807     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
20808         com.google.protobuf.ByteString data)
20809         throws com.google.protobuf.InvalidProtocolBufferException {
20810       return PARSER.parseFrom(data);
20811     }
20812     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
20813         com.google.protobuf.ByteString data,
20814         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20815         throws com.google.protobuf.InvalidProtocolBufferException {
20816       return PARSER.parseFrom(data, extensionRegistry);
20817     }
20818     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data)
20819         throws com.google.protobuf.InvalidProtocolBufferException {
20820       return PARSER.parseFrom(data);
20821     }
20822     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
20823         byte[] data,
20824         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20825         throws com.google.protobuf.InvalidProtocolBufferException {
20826       return PARSER.parseFrom(data, extensionRegistry);
20827     }
20828     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input)
20829         throws java.io.IOException {
20830       return PARSER.parseFrom(input);
20831     }
20832     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
20833         java.io.InputStream input,
20834         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20835         throws java.io.IOException {
20836       return PARSER.parseFrom(input, extensionRegistry);
20837     }
20838     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input)
20839         throws java.io.IOException {
20840       return PARSER.parseDelimitedFrom(input);
20841     }
20842     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(
20843         java.io.InputStream input,
20844         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20845         throws java.io.IOException {
20846       return PARSER.parseDelimitedFrom(input, extensionRegistry);
20847     }
20848     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
20849         com.google.protobuf.CodedInputStream input)
20850         throws java.io.IOException {
20851       return PARSER.parseFrom(input);
20852     }
20853     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
20854         com.google.protobuf.CodedInputStream input,
20855         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20856         throws java.io.IOException {
20857       return PARSER.parseFrom(input, extensionRegistry);
20858     }
20859 
20860     public static Builder newBuilder() { return Builder.create(); }
20861     public Builder newBuilderForType() { return newBuilder(); }
20862     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) {
20863       return newBuilder().mergeFrom(prototype);
20864     }
20865     public Builder toBuilder() { return newBuilder(this); }
20866 
20867     @java.lang.Override
20868     protected Builder newBuilderForType(
20869         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20870       Builder builder = new Builder(parent);
20871       return builder;
20872     }
20873     /**
20874      * Protobuf type {@code BulkLoadHFileRequest}
20875      *
20876      * <pre>
20877      **
20878      * Atomically bulk load multiple HFiles (say from different column families)
20879      * into an open region.
20880      * </pre>
20881      */
20882     public static final class Builder extends
20883         com.google.protobuf.GeneratedMessage.Builder<Builder>
20884        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder {
20885       public static final com.google.protobuf.Descriptors.Descriptor
20886           getDescriptor() {
20887         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
20888       }
20889 
20890       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
20891           internalGetFieldAccessorTable() {
20892         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable
20893             .ensureFieldAccessorsInitialized(
20894                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
20895       }
20896 
20897       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder()
20898       private Builder() {
20899         maybeForceBuilderInitialization();
20900       }
20901 
20902       private Builder(
20903           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
20904         super(parent);
20905         maybeForceBuilderInitialization();
20906       }
20907       private void maybeForceBuilderInitialization() {
20908         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
20909           getRegionFieldBuilder();
20910           getFamilyPathFieldBuilder();
20911         }
20912       }
20913       private static Builder create() {
20914         return new Builder();
20915       }
20916 
20917       public Builder clear() {
20918         super.clear();
20919         if (regionBuilder_ == null) {
20920           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
20921         } else {
20922           regionBuilder_.clear();
20923         }
20924         bitField0_ = (bitField0_ & ~0x00000001);
20925         if (familyPathBuilder_ == null) {
20926           familyPath_ = java.util.Collections.emptyList();
20927           bitField0_ = (bitField0_ & ~0x00000002);
20928         } else {
20929           familyPathBuilder_.clear();
20930         }
20931         assignSeqNum_ = false;
20932         bitField0_ = (bitField0_ & ~0x00000004);
20933         return this;
20934       }
20935 
20936       public Builder clone() {
20937         return create().mergeFrom(buildPartial());
20938       }
20939 
20940       public com.google.protobuf.Descriptors.Descriptor
20941           getDescriptorForType() {
20942         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
20943       }
20944 
20945       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() {
20946         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
20947       }
20948 
20949       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() {
20950         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial();
20951         if (!result.isInitialized()) {
20952           throw newUninitializedMessageException(result);
20953         }
20954         return result;
20955       }
20956 
20957       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() {
20958         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this);
20959         int from_bitField0_ = bitField0_;
20960         int to_bitField0_ = 0;
20961         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
20962           to_bitField0_ |= 0x00000001;
20963         }
20964         if (regionBuilder_ == null) {
20965           result.region_ = region_;
20966         } else {
20967           result.region_ = regionBuilder_.build();
20968         }
20969         if (familyPathBuilder_ == null) {
20970           if (((bitField0_ & 0x00000002) == 0x00000002)) {
20971             familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
20972             bitField0_ = (bitField0_ & ~0x00000002);
20973           }
20974           result.familyPath_ = familyPath_;
20975         } else {
20976           result.familyPath_ = familyPathBuilder_.build();
20977         }
20978         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
20979           to_bitField0_ |= 0x00000002;
20980         }
20981         result.assignSeqNum_ = assignSeqNum_;
20982         result.bitField0_ = to_bitField0_;
20983         onBuilt();
20984         return result;
20985       }
20986 
20987       public Builder mergeFrom(com.google.protobuf.Message other) {
20988         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) {
20989           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other);
20990         } else {
20991           super.mergeFrom(other);
20992           return this;
20993         }
20994       }
20995 
20996       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) {
20997         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this;
20998         if (other.hasRegion()) {
20999           mergeRegion(other.getRegion());
21000         }
21001         if (familyPathBuilder_ == null) {
21002           if (!other.familyPath_.isEmpty()) {
21003             if (familyPath_.isEmpty()) {
21004               familyPath_ = other.familyPath_;
21005               bitField0_ = (bitField0_ & ~0x00000002);
21006             } else {
21007               ensureFamilyPathIsMutable();
21008               familyPath_.addAll(other.familyPath_);
21009             }
21010             onChanged();
21011           }
21012         } else {
21013           if (!other.familyPath_.isEmpty()) {
21014             if (familyPathBuilder_.isEmpty()) {
21015               familyPathBuilder_.dispose();
21016               familyPathBuilder_ = null;
21017               familyPath_ = other.familyPath_;
21018               bitField0_ = (bitField0_ & ~0x00000002);
21019               familyPathBuilder_ = 
21020                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
21021                    getFamilyPathFieldBuilder() : null;
21022             } else {
21023               familyPathBuilder_.addAllMessages(other.familyPath_);
21024             }
21025           }
21026         }
21027         if (other.hasAssignSeqNum()) {
21028           setAssignSeqNum(other.getAssignSeqNum());
21029         }
21030         this.mergeUnknownFields(other.getUnknownFields());
21031         return this;
21032       }
21033 
21034       public final boolean isInitialized() {
21035         if (!hasRegion()) {
21036           
21037           return false;
21038         }
21039         if (!getRegion().isInitialized()) {
21040           
21041           return false;
21042         }
21043         for (int i = 0; i < getFamilyPathCount(); i++) {
21044           if (!getFamilyPath(i).isInitialized()) {
21045             
21046             return false;
21047           }
21048         }
21049         return true;
21050       }
21051 
21052       public Builder mergeFrom(
21053           com.google.protobuf.CodedInputStream input,
21054           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21055           throws java.io.IOException {
21056         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parsedMessage = null;
21057         try {
21058           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21059         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21060           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage();
21061           throw e;
21062         } finally {
21063           if (parsedMessage != null) {
21064             mergeFrom(parsedMessage);
21065           }
21066         }
21067         return this;
21068       }
      // Presence/mutability bits for this builder's fields:
      // 0x00000001 = region is set, 0x00000002 = familyPath_ list is mutable,
      // 0x00000004 = assignSeqNum is set.
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      // Exactly one of region_ (plain message) or regionBuilder_ (nested
      // builder) is authoritative at any time; regionBuilder_ is created
      // lazily by getRegionFieldBuilder(), after which region_ is nulled out.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * Replaces the region with {@code value}; rejects null.
       *
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * Replaces the region with the message built from {@code builderForValue}.
       *
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * Merges {@code value} into any existing region; if no region was set
       * (or it is still the default instance) the value simply replaces it.
       *
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * Resets the region to its default instance and clears its presence bit.
       *
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * Returns a builder for in-place mutation of the region; marks the
       * field present because callers typically populate it via the builder.
       *
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * Lazily creates the nested-builder wrapper for region; once created,
       * region_ is released and all access goes through regionBuilder_.
       *
       * <code>required .RegionSpecifier region = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }
21187 
      // repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;
      // Bit 0x00000002 of bitField0_ tracks list MUTABILITY (not presence):
      // the shared immutable emptyList() is swapped for a private ArrayList
      // copy on first write.  As with region, either familyPath_ or
      // familyPathBuilder_ is authoritative, never both.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replace the current list with a mutable copy unless
      // we already own a mutable one.
      private void ensureFamilyPathIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_);
          bitField0_ |= 0x00000002;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;

      /**
       * Returns an unmodifiable view of the family_path list.
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
        if (familyPathBuilder_ == null) {
          return java.util.Collections.unmodifiableList(familyPath_);
        } else {
          return familyPathBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public int getFamilyPathCount() {
        if (familyPathBuilder_ == null) {
          return familyPath_.size();
        } else {
          return familyPathBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
        if (familyPathBuilder_ == null) {
          return familyPath_.get(index);
        } else {
          return familyPathBuilder_.getMessage(index);
        }
      }
      /**
       * Replaces the element at {@code index}; rejects null.
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder setFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
        if (familyPathBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFamilyPathIsMutable();
          familyPath_.set(index, value);
          onChanged();
        } else {
          familyPathBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder setFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.set(index, builderForValue.build());
          onChanged();
        } else {
          familyPathBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * Appends {@code value}; rejects null.
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
        if (familyPathBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFamilyPathIsMutable();
          familyPath_.add(value);
          onChanged();
        } else {
          familyPathBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * Inserts {@code value} at {@code index}; rejects null.
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder addFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
        if (familyPathBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureFamilyPathIsMutable();
          familyPath_.add(index, value);
          onChanged();
        } else {
          familyPathBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder addFamilyPath(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.add(builderForValue.build());
          onChanged();
        } else {
          familyPathBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder addFamilyPath(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.add(index, builderForValue.build());
          onChanged();
        } else {
          familyPathBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * Appends all of {@code values}.
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder addAllFamilyPath(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          // GeneratedMessage.Builder helper: bulk-add with null checks.
          super.addAll(values, familyPath_);
          onChanged();
        } else {
          familyPathBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * Empties the list (reverts to the shared immutable empty list).
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder clearFamilyPath() {
        if (familyPathBuilder_ == null) {
          familyPath_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          familyPathBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public Builder removeFamilyPath(int index) {
        if (familyPathBuilder_ == null) {
          ensureFamilyPathIsMutable();
          familyPath_.remove(index);
          onChanged();
        } else {
          familyPathBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
          int index) {
        return getFamilyPathFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
          int index) {
        if (familyPathBuilder_ == null) {
          return familyPath_.get(index);  } else {
          return familyPathBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
           getFamilyPathOrBuilderList() {
        if (familyPathBuilder_ != null) {
          return familyPathBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(familyPath_);
        }
      }
      /**
       * Appends a default-valued element and returns its builder.
       *
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
        return getFamilyPathFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
          int index) {
        return getFamilyPathFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
      }
      /**
       * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder> 
           getFamilyPathBuilderList() {
        return getFamilyPathFieldBuilder().getBuilderList();
      }
      // Lazily creates the repeated-field builder wrapper; once created,
      // familyPath_ is released and all access goes through it.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> 
          getFamilyPathFieldBuilder() {
        if (familyPathBuilder_ == null) {
          familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>(
                  familyPath_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          familyPath_ = null;
        }
        return familyPathBuilder_;
      }
21427 
21428       // optional bool assign_seq_num = 3;
21429       private boolean assignSeqNum_ ;
21430       /**
21431        * <code>optional bool assign_seq_num = 3;</code>
21432        */
21433       public boolean hasAssignSeqNum() {
21434         return ((bitField0_ & 0x00000004) == 0x00000004);
21435       }
21436       /**
21437        * <code>optional bool assign_seq_num = 3;</code>
21438        */
21439       public boolean getAssignSeqNum() {
21440         return assignSeqNum_;
21441       }
21442       /**
21443        * <code>optional bool assign_seq_num = 3;</code>
21444        */
21445       public Builder setAssignSeqNum(boolean value) {
21446         bitField0_ |= 0x00000004;
21447         assignSeqNum_ = value;
21448         onChanged();
21449         return this;
21450       }
21451       /**
21452        * <code>optional bool assign_seq_num = 3;</code>
21453        */
21454       public Builder clearAssignSeqNum() {
21455         bitField0_ = (bitField0_ & ~0x00000004);
21456         assignSeqNum_ = false;
21457         onChanged();
21458         return this;
21459       }
21460 
21461       // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest)
21462     }
21463 
    static {
      // Eagerly build the shared immutable default instance.  The
      // noInit constructor skips normal initialization, so defaults are
      // applied explicitly via initFields().
      defaultInstance = new BulkLoadHFileRequest(true);
      defaultInstance.initFields();
    }
21468 
21469     // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest)
21470   }
21471 
  /**
   * Read-only accessor interface shared by {@code BulkLoadHFileResponse}
   * and its {@code Builder}.
   */
  public interface BulkLoadHFileResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool loaded = 1;
    /**
     * Whether the loaded field has been set.
     *
     * <code>required bool loaded = 1;</code>
     */
    boolean hasLoaded();
    /**
     * <code>required bool loaded = 1;</code>
     */
    boolean getLoaded();
  }
21485   /**
21486    * Protobuf type {@code BulkLoadHFileResponse}
21487    */
21488   public static final class BulkLoadHFileResponse extends
21489       com.google.protobuf.GeneratedMessage
21490       implements BulkLoadHFileResponseOrBuilder {
    // Use BulkLoadHFileResponse.newBuilder() to construct.
    private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor for the shared default instance; field defaults
    // are applied separately via initFields() in the static initializer.
    private BulkLoadHFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable instance with all fields at their defaults.
    private static final BulkLoadHFileResponse defaultInstance;
    public static BulkLoadHFileResponse getDefaultInstance() {
      return defaultInstance;
    }

    public BulkLoadHFileResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this schema does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parsing constructor: reads tag/value pairs from {@code input} until
     * end-of-message, storing recognized fields and collecting everything
     * else into unknownFields.  Used only via PARSER.
     */
    private BulkLoadHFileResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Unused here: this message has no repeated fields to track.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: each arm breaks, so the default arm appearing before
          // case 8 does not affect dispatch.
          switch (tag) {
            case 0:
              // Tag 0 signals end of the message stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (loaded), wire type 0 (varint/bool).
              bitField0_ |= 0x00000001;
              loaded_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was read, even on failure, so the partial
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor;
    }

    // Maps descriptor fields to reflective accessors on this class.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
    }

    // Stateless parser that simply delegates to the parsing constructor.
    public static com.google.protobuf.Parser<BulkLoadHFileResponse> PARSER =
        new com.google.protobuf.AbstractParser<BulkLoadHFileResponse>() {
      public BulkLoadHFileResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new BulkLoadHFileResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<BulkLoadHFileResponse> getParserForType() {
      return PARSER;
    }
21579 
    // Presence bits: 0x00000001 = loaded is set.
    private int bitField0_;
    // required bool loaded = 1;
    public static final int LOADED_FIELD_NUMBER = 1;
    private boolean loaded_;
    /**
     * Whether the loaded field was present on the wire.
     *
     * <code>required bool loaded = 1;</code>
     */
    public boolean hasLoaded() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool loaded = 1;</code>
     */
    public boolean getLoaded() {
      return loaded_;
    }

    // Applies proto-declared defaults; called from both constructors.
    private void initFields() {
      loaded_ = false;
    }
21600     private byte memoizedIsInitialized = -1;
21601     public final boolean isInitialized() {
21602       byte isInitialized = memoizedIsInitialized;
21603       if (isInitialized != -1) return isInitialized == 1;
21604 
21605       if (!hasLoaded()) {
21606         memoizedIsInitialized = 0;
21607         return false;
21608       }
21609       memoizedIsInitialized = 1;
21610       return true;
21611     }
21612 
    /**
     * Serializes this message to {@code output}.  getSerializedSize() is
     * invoked first for its memoization side effect, which the length
     * computation below relies on.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, loaded_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, loaded_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
21636 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
21643 
21644     @java.lang.Override
21645     public boolean equals(final java.lang.Object obj) {
21646       if (obj == this) {
21647        return true;
21648       }
21649       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) {
21650         return super.equals(obj);
21651       }
21652       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj;
21653 
21654       boolean result = true;
21655       result = result && (hasLoaded() == other.hasLoaded());
21656       if (hasLoaded()) {
21657         result = result && (getLoaded()
21658             == other.getLoaded());
21659       }
21660       result = result &&
21661           getUnknownFields().equals(other.getUnknownFields());
21662       return result;
21663     }
21664 
    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0
    // would be recomputed on every call, which is harmless).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLoaded()) {
        hash = (37 * hash) + LOADED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLoaded());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
21681 
    // Convenience parse entry points; all delegate to PARSER.  The
    // *Delimited* variants expect a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
21734 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
21747     }
21748     /**
21749      * Protobuf type {@code BulkLoadHFileResponse}
21750      */
21751     public static final class Builder extends
21752         com.google.protobuf.GeneratedMessage.Builder<Builder>
21753        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor;
      }

      // Maps descriptor fields to reflective accessors on the builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so nothing to force-initialize even
      // when alwaysUseFieldBuilders is set.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
21783 
21784       public Builder clear() {
21785         super.clear();
21786         loaded_ = false;
21787         bitField0_ = (bitField0_ & ~0x00000001);
21788         return this;
21789       }
21790 
21791       public Builder clone() {
21792         return create().mergeFrom(buildPartial());
21793       }
21794 
21795       public com.google.protobuf.Descriptors.Descriptor
21796           getDescriptorForType() {
21797         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor;
21798       }
21799 
21800       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() {
21801         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
21802       }
21803 
21804       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() {
21805         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial();
21806         if (!result.isInitialized()) {
21807           throw newUninitializedMessageException(result);
21808         }
21809         return result;
21810       }
21811 
21812       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() {
21813         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this);
21814         int from_bitField0_ = bitField0_;
21815         int to_bitField0_ = 0;
21816         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
21817           to_bitField0_ |= 0x00000001;
21818         }
21819         result.loaded_ = loaded_;
21820         result.bitField0_ = to_bitField0_;
21821         onBuilt();
21822         return result;
21823       }
21824 
21825       public Builder mergeFrom(com.google.protobuf.Message other) {
21826         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) {
21827           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other);
21828         } else {
21829           super.mergeFrom(other);
21830           return this;
21831         }
21832       }
21833 
21834       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) {
21835         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this;
21836         if (other.hasLoaded()) {
21837           setLoaded(other.getLoaded());
21838         }
21839         this.mergeUnknownFields(other.getUnknownFields());
21840         return this;
21841       }
21842 
21843       public final boolean isInitialized() {
21844         if (!hasLoaded()) {
21845           
21846           return false;
21847         }
21848         return true;
21849       }
21850 
21851       public Builder mergeFrom(
21852           com.google.protobuf.CodedInputStream input,
21853           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21854           throws java.io.IOException {
21855         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null;
21856         try {
21857           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21858         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21859           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage();
21860           throw e;
21861         } finally {
21862           if (parsedMessage != null) {
21863             mergeFrom(parsedMessage);
21864           }
21865         }
21866         return this;
21867       }
21868       private int bitField0_;
21869 
21870       // required bool loaded = 1;
21871       private boolean loaded_ ;
21872       /**
21873        * <code>required bool loaded = 1;</code>
21874        */
21875       public boolean hasLoaded() {
21876         return ((bitField0_ & 0x00000001) == 0x00000001);
21877       }
21878       /**
21879        * <code>required bool loaded = 1;</code>
21880        */
21881       public boolean getLoaded() {
21882         return loaded_;
21883       }
21884       /**
21885        * <code>required bool loaded = 1;</code>
21886        */
21887       public Builder setLoaded(boolean value) {
21888         bitField0_ |= 0x00000001;
21889         loaded_ = value;
21890         onChanged();
21891         return this;
21892       }
21893       /**
21894        * <code>required bool loaded = 1;</code>
21895        */
21896       public Builder clearLoaded() {
21897         bitField0_ = (bitField0_ & ~0x00000001);
21898         loaded_ = false;
21899         onChanged();
21900         return this;
21901       }
21902 
21903       // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse)
21904     }
21905 
    // Eagerly creates the shared default (empty) instance for this message type.
    static {
      defaultInstance = new BulkLoadHFileResponse(true);
      defaultInstance.initFields();
    }
21910 
21911     // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse)
21912   }
21913 
  // Read-only accessor interface for CoprocessorServiceCall, implemented by
  // both the immutable message and its Builder. All four fields are required.
  public interface CoprocessorServiceCallOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes row = 1;
    /**
     * <code>required bytes row = 1;</code>
     */
    boolean hasRow();
    /**
     * <code>required bytes row = 1;</code>
     */
    com.google.protobuf.ByteString getRow();

    // required string service_name = 2;
    /**
     * <code>required string service_name = 2;</code>
     */
    boolean hasServiceName();
    /**
     * <code>required string service_name = 2;</code>
     */
    java.lang.String getServiceName();
    /**
     * <code>required string service_name = 2;</code>
     */
    com.google.protobuf.ByteString
        getServiceNameBytes();

    // required string method_name = 3;
    /**
     * <code>required string method_name = 3;</code>
     */
    boolean hasMethodName();
    /**
     * <code>required string method_name = 3;</code>
     */
    java.lang.String getMethodName();
    /**
     * <code>required string method_name = 3;</code>
     */
    com.google.protobuf.ByteString
        getMethodNameBytes();

    // required bytes request = 4;
    /**
     * <code>required bytes request = 4;</code>
     */
    boolean hasRequest();
    /**
     * <code>required bytes request = 4;</code>
     */
    com.google.protobuf.ByteString getRequest();
  }
21967   /**
21968    * Protobuf type {@code CoprocessorServiceCall}
21969    */
21970   public static final class CoprocessorServiceCall extends
21971       com.google.protobuf.GeneratedMessage
21972       implements CoprocessorServiceCallOrBuilder {
    // Use CoprocessorServiceCall.newBuilder() to construct.
    private CoprocessorServiceCall(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only to create the shared default instance.
    private CoprocessorServiceCall(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default (empty) instance, assigned in the static block.
    private static final CoprocessorServiceCall defaultInstance;
    public static CoprocessorServiceCall getDefaultInstance() {
      return defaultInstance;
    }

    public CoprocessorServiceCall getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not defined in this schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: reads tag/value pairs until end of input,
    // setting the matching field and its presence bit for each known tag and
    // preserving everything else in unknownFields.
    private CoprocessorServiceCall(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the generator emits 'default' before the field cases; case
          // order inside a switch does not affect matching.
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / end of group.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (row), wire type 2: length-delimited bytes.
              bitField0_ |= 0x00000001;
              row_ = input.readBytes();
              break;
            }
            case 18: {
              // Field 2 (service_name): stored as ByteString, decoded lazily
              // by getServiceName().
              bitField0_ |= 0x00000002;
              serviceName_ = input.readBytes();
              break;
            }
            case 26: {
              // Field 3 (method_name): stored as ByteString, decoded lazily.
              bitField0_ |= 0x00000004;
              methodName_ = input.readBytes();
              break;
            }
            case 34: {
              // Field 4 (request), length-delimited bytes.
              bitField0_ |= 0x00000008;
              request_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize unknown fields, even on error, so the partially
        // parsed message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor identifying the CoprocessorServiceCall message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor;
    }

    // Reflection table mapping descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
    }

    // Stateless parser that delegates to the wire-parsing constructor above.
    public static com.google.protobuf.Parser<CoprocessorServiceCall> PARSER =
        new com.google.protobuf.AbstractParser<CoprocessorServiceCall>() {
      public CoprocessorServiceCall parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CoprocessorServiceCall(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CoprocessorServiceCall> getParserForType() {
      return PARSER;
    }
22076 
    // Presence bits: bit 0 = row, bit 1 = service_name, bit 2 = method_name,
    // bit 3 = request.
    private int bitField0_;
    // required bytes row = 1;
    public static final int ROW_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString row_;
    /**
     * <code>required bytes row = 1;</code>
     */
    public boolean hasRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes row = 1;</code>
     */
    public com.google.protobuf.ByteString getRow() {
      return row_;
    }

    // required string service_name = 2;
    public static final int SERVICE_NAME_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString; parsing stores the raw
    // ByteString and the getters below convert (and cache) on demand.
    private java.lang.Object serviceName_;
    /**
     * <code>required string service_name = 2;</code>
     */
    public boolean hasServiceName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required string service_name = 2;</code>
     *
     * Decodes the cached ByteString as UTF-8; the decoded String replaces the
     * cached value only when the bytes are valid UTF-8.
     */
    public java.lang.String getServiceName() {
      java.lang.Object ref = serviceName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          serviceName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string service_name = 2;</code>
     *
     * Inverse of getServiceName(): encodes and caches the ByteString form.
     */
    public com.google.protobuf.ByteString
        getServiceNameBytes() {
      java.lang.Object ref = serviceName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        serviceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // required string method_name = 3;
    public static final int METHOD_NAME_FIELD_NUMBER = 3;
    // Same String/ByteString dual representation as serviceName_.
    private java.lang.Object methodName_;
    /**
     * <code>required string method_name = 3;</code>
     */
    public boolean hasMethodName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required string method_name = 3;</code>
     */
    public java.lang.String getMethodName() {
      java.lang.Object ref = methodName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          methodName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string method_name = 3;</code>
     */
    public com.google.protobuf.ByteString
        getMethodNameBytes() {
      java.lang.Object ref = methodName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        methodName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // required bytes request = 4;
    public static final int REQUEST_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString request_;
    /**
     * <code>required bytes request = 4;</code>
     */
    public boolean hasRequest() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required bytes request = 4;</code>
     */
    public com.google.protobuf.ByteString getRequest() {
      return request_;
    }
22195 
    // Sets every field to its proto default (empty bytes / empty string).
    private void initFields() {
      row_ = com.google.protobuf.ByteString.EMPTY;
      serviceName_ = "";
      methodName_ = "";
      request_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized isInitialized result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // True only when all four required fields are present; result is cached.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRow()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasServiceName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasMethodName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRequest()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
22226 
    // Serializes all present fields (then unknown fields) to the stream.
    // getSerializedSize() is called first for its memoization side effect,
    // as required by the CodedOutputStream protocol.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, row_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getServiceNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getMethodNameBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, request_);
      }
      getUnknownFields().writeTo(output);
    }
22244 
    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes (once) and returns the serialized byte size of all present
    // fields plus unknown fields.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, row_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getServiceNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getMethodNameBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, request_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's writeReplace.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
22278 
    // Field-by-field equality: each field must agree on presence and, when
    // present, on value; unknown fields must also match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj;

      boolean result = true;
      result = result && (hasRow() == other.hasRow());
      if (hasRow()) {
        result = result && getRow()
            .equals(other.getRow());
      }
      result = result && (hasServiceName() == other.hasServiceName());
      if (hasServiceName()) {
        result = result && getServiceName()
            .equals(other.getServiceName());
      }
      result = result && (hasMethodName() == other.hasMethodName());
      if (hasMethodName()) {
        result = result && getMethodName()
            .equals(other.getMethodName());
      }
      result = result && (hasRequest() == other.hasRequest());
      if (hasRequest()) {
        result = result && getRequest()
            .equals(other.getRequest());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
22314 
    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0
    // would be recomputed each call, which is harmless).
    private int memoizedHashCode = 0;
    // Hash mixes the descriptor, each present field (keyed by field number),
    // and the unknown fields; consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRow()) {
        hash = (37 * hash) + ROW_FIELD_NUMBER;
        hash = (53 * hash) + getRow().hashCode();
      }
      if (hasServiceName()) {
        hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getServiceName().hashCode();
      }
      if (hasMethodName()) {
        hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getMethodName().hashCode();
      }
      if (hasRequest()) {
        hash = (37 * hash) + REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getRequest().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
22343 
    // Static parse entry points; all delegate to PARSER. The parseDelimited
    // variants expect a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
22396 
    // Returns a fresh, empty builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new builder pre-populated with all fields of the prototype.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Returns a builder initialized from this message instance.
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to the given parent; used by the generated
    // nested-builder plumbing in GeneratedMessage.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
22410     /**
22411      * Protobuf type {@code CoprocessorServiceCall}
22412      */
22413     public static final class Builder extends
22414         com.google.protobuf.GeneratedMessage.Builder<Builder>
22415        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder {
      // Descriptor identifying the CoprocessorServiceCall message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor;
      }

      // Reflection table mapping descriptor fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields, so there are no sub-builders to force;
      // the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
22445 
      // Resets all four fields to their proto defaults and clears their
      // presence bits (bits 0-3 of bitField0_).
      public Builder clear() {
        super.clear();
        row_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        serviceName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        methodName_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        request_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      // Deep copy via a round-trip through buildPartial().
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
      }

      // Builds the message, throwing if any required field is unset.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
22479 
      // Builds the message without checking required fields. Field values are
      // always copied; only the presence bits are filtered through
      // from_bitField0_ into the result's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.row_ = row_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.serviceName_ = serviceName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.methodName_ = methodName_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.request_ = request_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
22504 
      // Dynamic dispatch: use the typed merge when possible, otherwise fall
      // back to reflective merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies only the fields set on 'other'. String fields copy the raw
      // String/ByteString object directly (preserving the lazy-decode cache)
      // instead of going through the setter.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this;
        if (other.hasRow()) {
          setRow(other.getRow());
        }
        if (other.hasServiceName()) {
          bitField0_ |= 0x00000002;
          serviceName_ = other.serviceName_;
          onChanged();
        }
        if (other.hasMethodName()) {
          bitField0_ |= 0x00000004;
          methodName_ = other.methodName_;
          onChanged();
        }
        if (other.hasRequest()) {
          setRequest(other.getRequest());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // A builder is initialized only when all four required fields are set.
      public final boolean isInitialized() {
        if (!hasRow()) {
          // required field "row" is missing
          return false;
        }
        if (!hasServiceName()) {
          // required field "service_name" is missing
          return false;
        }
        if (!hasMethodName()) {
          // required field "method_name" is missing
          return false;
        }
        if (!hasRequest()) {
          // required field "request" is missing
          return false;
        }
        return true;
      }
22555 
22556       public Builder mergeFrom(
22557           com.google.protobuf.CodedInputStream input,
22558           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22559           throws java.io.IOException {
22560         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parsedMessage = null;
22561         try {
22562           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
22563         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22564           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage();
22565           throw e;
22566         } finally {
22567           if (parsedMessage != null) {
22568             mergeFrom(parsedMessage);
22569           }
22570         }
22571         return this;
22572       }
22573       private int bitField0_;
22574 
22575       // required bytes row = 1;
22576       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
22577       /**
22578        * <code>required bytes row = 1;</code>
22579        */
22580       public boolean hasRow() {
22581         return ((bitField0_ & 0x00000001) == 0x00000001);
22582       }
22583       /**
22584        * <code>required bytes row = 1;</code>
22585        */
22586       public com.google.protobuf.ByteString getRow() {
22587         return row_;
22588       }
22589       /**
22590        * <code>required bytes row = 1;</code>
22591        */
22592       public Builder setRow(com.google.protobuf.ByteString value) {
22593         if (value == null) {
22594     throw new NullPointerException();
22595   }
22596   bitField0_ |= 0x00000001;
22597         row_ = value;
22598         onChanged();
22599         return this;
22600       }
22601       /**
22602        * <code>required bytes row = 1;</code>
22603        */
22604       public Builder clearRow() {
22605         bitField0_ = (bitField0_ & ~0x00000001);
22606         row_ = getDefaultInstance().getRow();
22607         onChanged();
22608         return this;
22609       }
22610 
22611       // required string service_name = 2;
22612       private java.lang.Object serviceName_ = "";
22613       /**
22614        * <code>required string service_name = 2;</code>
22615        */
22616       public boolean hasServiceName() {
22617         return ((bitField0_ & 0x00000002) == 0x00000002);
22618       }
22619       /**
22620        * <code>required string service_name = 2;</code>
22621        */
22622       public java.lang.String getServiceName() {
22623         java.lang.Object ref = serviceName_;
22624         if (!(ref instanceof java.lang.String)) {
22625           java.lang.String s = ((com.google.protobuf.ByteString) ref)
22626               .toStringUtf8();
22627           serviceName_ = s;
22628           return s;
22629         } else {
22630           return (java.lang.String) ref;
22631         }
22632       }
22633       /**
22634        * <code>required string service_name = 2;</code>
22635        */
22636       public com.google.protobuf.ByteString
22637           getServiceNameBytes() {
22638         java.lang.Object ref = serviceName_;
22639         if (ref instanceof String) {
22640           com.google.protobuf.ByteString b = 
22641               com.google.protobuf.ByteString.copyFromUtf8(
22642                   (java.lang.String) ref);
22643           serviceName_ = b;
22644           return b;
22645         } else {
22646           return (com.google.protobuf.ByteString) ref;
22647         }
22648       }
22649       /**
22650        * <code>required string service_name = 2;</code>
22651        */
22652       public Builder setServiceName(
22653           java.lang.String value) {
22654         if (value == null) {
22655     throw new NullPointerException();
22656   }
22657   bitField0_ |= 0x00000002;
22658         serviceName_ = value;
22659         onChanged();
22660         return this;
22661       }
22662       /**
22663        * <code>required string service_name = 2;</code>
22664        */
22665       public Builder clearServiceName() {
22666         bitField0_ = (bitField0_ & ~0x00000002);
22667         serviceName_ = getDefaultInstance().getServiceName();
22668         onChanged();
22669         return this;
22670       }
22671       /**
22672        * <code>required string service_name = 2;</code>
22673        */
22674       public Builder setServiceNameBytes(
22675           com.google.protobuf.ByteString value) {
22676         if (value == null) {
22677     throw new NullPointerException();
22678   }
22679   bitField0_ |= 0x00000002;
22680         serviceName_ = value;
22681         onChanged();
22682         return this;
22683       }
22684 
22685       // required string method_name = 3;
22686       private java.lang.Object methodName_ = "";
22687       /**
22688        * <code>required string method_name = 3;</code>
22689        */
22690       public boolean hasMethodName() {
22691         return ((bitField0_ & 0x00000004) == 0x00000004);
22692       }
22693       /**
22694        * <code>required string method_name = 3;</code>
22695        */
22696       public java.lang.String getMethodName() {
22697         java.lang.Object ref = methodName_;
22698         if (!(ref instanceof java.lang.String)) {
22699           java.lang.String s = ((com.google.protobuf.ByteString) ref)
22700               .toStringUtf8();
22701           methodName_ = s;
22702           return s;
22703         } else {
22704           return (java.lang.String) ref;
22705         }
22706       }
22707       /**
22708        * <code>required string method_name = 3;</code>
22709        */
22710       public com.google.protobuf.ByteString
22711           getMethodNameBytes() {
22712         java.lang.Object ref = methodName_;
22713         if (ref instanceof String) {
22714           com.google.protobuf.ByteString b = 
22715               com.google.protobuf.ByteString.copyFromUtf8(
22716                   (java.lang.String) ref);
22717           methodName_ = b;
22718           return b;
22719         } else {
22720           return (com.google.protobuf.ByteString) ref;
22721         }
22722       }
22723       /**
22724        * <code>required string method_name = 3;</code>
22725        */
22726       public Builder setMethodName(
22727           java.lang.String value) {
22728         if (value == null) {
22729     throw new NullPointerException();
22730   }
22731   bitField0_ |= 0x00000004;
22732         methodName_ = value;
22733         onChanged();
22734         return this;
22735       }
22736       /**
22737        * <code>required string method_name = 3;</code>
22738        */
22739       public Builder clearMethodName() {
22740         bitField0_ = (bitField0_ & ~0x00000004);
22741         methodName_ = getDefaultInstance().getMethodName();
22742         onChanged();
22743         return this;
22744       }
22745       /**
22746        * <code>required string method_name = 3;</code>
22747        */
22748       public Builder setMethodNameBytes(
22749           com.google.protobuf.ByteString value) {
22750         if (value == null) {
22751     throw new NullPointerException();
22752   }
22753   bitField0_ |= 0x00000004;
22754         methodName_ = value;
22755         onChanged();
22756         return this;
22757       }
22758 
22759       // required bytes request = 4;
22760       private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
22761       /**
22762        * <code>required bytes request = 4;</code>
22763        */
22764       public boolean hasRequest() {
22765         return ((bitField0_ & 0x00000008) == 0x00000008);
22766       }
22767       /**
22768        * <code>required bytes request = 4;</code>
22769        */
22770       public com.google.protobuf.ByteString getRequest() {
22771         return request_;
22772       }
22773       /**
22774        * <code>required bytes request = 4;</code>
22775        */
22776       public Builder setRequest(com.google.protobuf.ByteString value) {
22777         if (value == null) {
22778     throw new NullPointerException();
22779   }
22780   bitField0_ |= 0x00000008;
22781         request_ = value;
22782         onChanged();
22783         return this;
22784       }
22785       /**
22786        * <code>required bytes request = 4;</code>
22787        */
22788       public Builder clearRequest() {
22789         bitField0_ = (bitField0_ & ~0x00000008);
22790         request_ = getDefaultInstance().getRequest();
22791         onChanged();
22792         return this;
22793       }
22794 
22795       // @@protoc_insertion_point(builder_scope:CoprocessorServiceCall)
22796     }
22797 
22798     static {
22799       defaultInstance = new CoprocessorServiceCall(true);
22800       defaultInstance.initFields();
22801     }
22802 
22803     // @@protoc_insertion_point(class_scope:CoprocessorServiceCall)
22804   }
22805 
22806   public interface CoprocessorServiceResultOrBuilder
22807       extends com.google.protobuf.MessageOrBuilder {
22808 
22809     // optional .NameBytesPair value = 1;
22810     /**
22811      * <code>optional .NameBytesPair value = 1;</code>
22812      */
22813     boolean hasValue();
22814     /**
22815      * <code>optional .NameBytesPair value = 1;</code>
22816      */
22817     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
22818     /**
22819      * <code>optional .NameBytesPair value = 1;</code>
22820      */
22821     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
22822   }
22823   /**
22824    * Protobuf type {@code CoprocessorServiceResult}
22825    */
22826   public static final class CoprocessorServiceResult extends
22827       com.google.protobuf.GeneratedMessage
22828       implements CoprocessorServiceResultOrBuilder {
22829     // Use CoprocessorServiceResult.newBuilder() to construct.
22830     private CoprocessorServiceResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
22831       super(builder);
22832       this.unknownFields = builder.getUnknownFields();
22833     }
22834     private CoprocessorServiceResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
22835 
22836     private static final CoprocessorServiceResult defaultInstance;
22837     public static CoprocessorServiceResult getDefaultInstance() {
22838       return defaultInstance;
22839     }
22840 
22841     public CoprocessorServiceResult getDefaultInstanceForType() {
22842       return defaultInstance;
22843     }
22844 
22845     private final com.google.protobuf.UnknownFieldSet unknownFields;
22846     @java.lang.Override
22847     public final com.google.protobuf.UnknownFieldSet
22848         getUnknownFields() {
22849       return this.unknownFields;
22850     }
22851     private CoprocessorServiceResult(
22852         com.google.protobuf.CodedInputStream input,
22853         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22854         throws com.google.protobuf.InvalidProtocolBufferException {
22855       initFields();
22856       int mutable_bitField0_ = 0;
22857       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
22858           com.google.protobuf.UnknownFieldSet.newBuilder();
22859       try {
22860         boolean done = false;
22861         while (!done) {
22862           int tag = input.readTag();
22863           switch (tag) {
22864             case 0:
22865               done = true;
22866               break;
22867             default: {
22868               if (!parseUnknownField(input, unknownFields,
22869                                      extensionRegistry, tag)) {
22870                 done = true;
22871               }
22872               break;
22873             }
22874             case 10: {
22875               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
22876               if (((bitField0_ & 0x00000001) == 0x00000001)) {
22877                 subBuilder = value_.toBuilder();
22878               }
22879               value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
22880               if (subBuilder != null) {
22881                 subBuilder.mergeFrom(value_);
22882                 value_ = subBuilder.buildPartial();
22883               }
22884               bitField0_ |= 0x00000001;
22885               break;
22886             }
22887           }
22888         }
22889       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22890         throw e.setUnfinishedMessage(this);
22891       } catch (java.io.IOException e) {
22892         throw new com.google.protobuf.InvalidProtocolBufferException(
22893             e.getMessage()).setUnfinishedMessage(this);
22894       } finally {
22895         this.unknownFields = unknownFields.build();
22896         makeExtensionsImmutable();
22897       }
22898     }
22899     public static final com.google.protobuf.Descriptors.Descriptor
22900         getDescriptor() {
22901       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_descriptor;
22902     }
22903 
22904     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
22905         internalGetFieldAccessorTable() {
22906       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_fieldAccessorTable
22907           .ensureFieldAccessorsInitialized(
22908               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
22909     }
22910 
22911     public static com.google.protobuf.Parser<CoprocessorServiceResult> PARSER =
22912         new com.google.protobuf.AbstractParser<CoprocessorServiceResult>() {
22913       public CoprocessorServiceResult parsePartialFrom(
22914           com.google.protobuf.CodedInputStream input,
22915           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22916           throws com.google.protobuf.InvalidProtocolBufferException {
22917         return new CoprocessorServiceResult(input, extensionRegistry);
22918       }
22919     };
22920 
22921     @java.lang.Override
22922     public com.google.protobuf.Parser<CoprocessorServiceResult> getParserForType() {
22923       return PARSER;
22924     }
22925 
22926     private int bitField0_;
22927     // optional .NameBytesPair value = 1;
22928     public static final int VALUE_FIELD_NUMBER = 1;
22929     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
22930     /**
22931      * <code>optional .NameBytesPair value = 1;</code>
22932      */
22933     public boolean hasValue() {
22934       return ((bitField0_ & 0x00000001) == 0x00000001);
22935     }
22936     /**
22937      * <code>optional .NameBytesPair value = 1;</code>
22938      */
22939     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
22940       return value_;
22941     }
22942     /**
22943      * <code>optional .NameBytesPair value = 1;</code>
22944      */
22945     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
22946       return value_;
22947     }
22948 
22949     private void initFields() {
22950       value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
22951     }
22952     private byte memoizedIsInitialized = -1;
22953     public final boolean isInitialized() {
22954       byte isInitialized = memoizedIsInitialized;
22955       if (isInitialized != -1) return isInitialized == 1;
22956 
22957       if (hasValue()) {
22958         if (!getValue().isInitialized()) {
22959           memoizedIsInitialized = 0;
22960           return false;
22961         }
22962       }
22963       memoizedIsInitialized = 1;
22964       return true;
22965     }
22966 
22967     public void writeTo(com.google.protobuf.CodedOutputStream output)
22968                         throws java.io.IOException {
22969       getSerializedSize();
22970       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22971         output.writeMessage(1, value_);
22972       }
22973       getUnknownFields().writeTo(output);
22974     }
22975 
22976     private int memoizedSerializedSize = -1;
22977     public int getSerializedSize() {
22978       int size = memoizedSerializedSize;
22979       if (size != -1) return size;
22980 
22981       size = 0;
22982       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22983         size += com.google.protobuf.CodedOutputStream
22984           .computeMessageSize(1, value_);
22985       }
22986       size += getUnknownFields().getSerializedSize();
22987       memoizedSerializedSize = size;
22988       return size;
22989     }
22990 
22991     private static final long serialVersionUID = 0L;
22992     @java.lang.Override
22993     protected java.lang.Object writeReplace()
22994         throws java.io.ObjectStreamException {
22995       return super.writeReplace();
22996     }
22997 
22998     @java.lang.Override
22999     public boolean equals(final java.lang.Object obj) {
23000       if (obj == this) {
23001        return true;
23002       }
23003       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)) {
23004         return super.equals(obj);
23005       }
23006       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) obj;
23007 
23008       boolean result = true;
23009       result = result && (hasValue() == other.hasValue());
23010       if (hasValue()) {
23011         result = result && getValue()
23012             .equals(other.getValue());
23013       }
23014       result = result &&
23015           getUnknownFields().equals(other.getUnknownFields());
23016       return result;
23017     }
23018 
23019     private int memoizedHashCode = 0;
23020     @java.lang.Override
23021     public int hashCode() {
23022       if (memoizedHashCode != 0) {
23023         return memoizedHashCode;
23024       }
23025       int hash = 41;
23026       hash = (19 * hash) + getDescriptorForType().hashCode();
23027       if (hasValue()) {
23028         hash = (37 * hash) + VALUE_FIELD_NUMBER;
23029         hash = (53 * hash) + getValue().hashCode();
23030       }
23031       hash = (29 * hash) + getUnknownFields().hashCode();
23032       memoizedHashCode = hash;
23033       return hash;
23034     }
23035 
23036     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23037         com.google.protobuf.ByteString data)
23038         throws com.google.protobuf.InvalidProtocolBufferException {
23039       return PARSER.parseFrom(data);
23040     }
23041     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23042         com.google.protobuf.ByteString data,
23043         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23044         throws com.google.protobuf.InvalidProtocolBufferException {
23045       return PARSER.parseFrom(data, extensionRegistry);
23046     }
23047     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(byte[] data)
23048         throws com.google.protobuf.InvalidProtocolBufferException {
23049       return PARSER.parseFrom(data);
23050     }
23051     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23052         byte[] data,
23053         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23054         throws com.google.protobuf.InvalidProtocolBufferException {
23055       return PARSER.parseFrom(data, extensionRegistry);
23056     }
23057     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(java.io.InputStream input)
23058         throws java.io.IOException {
23059       return PARSER.parseFrom(input);
23060     }
23061     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23062         java.io.InputStream input,
23063         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23064         throws java.io.IOException {
23065       return PARSER.parseFrom(input, extensionRegistry);
23066     }
23067     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(java.io.InputStream input)
23068         throws java.io.IOException {
23069       return PARSER.parseDelimitedFrom(input);
23070     }
23071     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(
23072         java.io.InputStream input,
23073         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23074         throws java.io.IOException {
23075       return PARSER.parseDelimitedFrom(input, extensionRegistry);
23076     }
23077     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23078         com.google.protobuf.CodedInputStream input)
23079         throws java.io.IOException {
23080       return PARSER.parseFrom(input);
23081     }
23082     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
23083         com.google.protobuf.CodedInputStream input,
23084         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23085         throws java.io.IOException {
23086       return PARSER.parseFrom(input, extensionRegistry);
23087     }
23088 
23089     public static Builder newBuilder() { return Builder.create(); }
23090     public Builder newBuilderForType() { return newBuilder(); }
23091     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype) {
23092       return newBuilder().mergeFrom(prototype);
23093     }
23094     public Builder toBuilder() { return newBuilder(this); }
23095 
23096     @java.lang.Override
23097     protected Builder newBuilderForType(
23098         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23099       Builder builder = new Builder(parent);
23100       return builder;
23101     }
23102     /**
23103      * Protobuf type {@code CoprocessorServiceResult}
23104      */
23105     public static final class Builder extends
23106         com.google.protobuf.GeneratedMessage.Builder<Builder>
23107        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder {
23108       public static final com.google.protobuf.Descriptors.Descriptor
23109           getDescriptor() {
23110         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_descriptor;
23111       }
23112 
23113       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
23114           internalGetFieldAccessorTable() {
23115         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_fieldAccessorTable
23116             .ensureFieldAccessorsInitialized(
23117                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
23118       }
23119 
23120       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder()
23121       private Builder() {
23122         maybeForceBuilderInitialization();
23123       }
23124 
23125       private Builder(
23126           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23127         super(parent);
23128         maybeForceBuilderInitialization();
23129       }
23130       private void maybeForceBuilderInitialization() {
23131         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
23132           getValueFieldBuilder();
23133         }
23134       }
23135       private static Builder create() {
23136         return new Builder();
23137       }
23138 
23139       public Builder clear() {
23140         super.clear();
23141         if (valueBuilder_ == null) {
23142           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
23143         } else {
23144           valueBuilder_.clear();
23145         }
23146         bitField0_ = (bitField0_ & ~0x00000001);
23147         return this;
23148       }
23149 
23150       public Builder clone() {
23151         return create().mergeFrom(buildPartial());
23152       }
23153 
23154       public com.google.protobuf.Descriptors.Descriptor
23155           getDescriptorForType() {
23156         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_descriptor;
23157       }
23158 
23159       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() {
23160         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
23161       }
23162 
23163       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult build() {
23164         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = buildPartial();
23165         if (!result.isInitialized()) {
23166           throw newUninitializedMessageException(result);
23167         }
23168         return result;
23169       }
23170 
23171       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult buildPartial() {
23172         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult(this);
23173         int from_bitField0_ = bitField0_;
23174         int to_bitField0_ = 0;
23175         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
23176           to_bitField0_ |= 0x00000001;
23177         }
23178         if (valueBuilder_ == null) {
23179           result.value_ = value_;
23180         } else {
23181           result.value_ = valueBuilder_.build();
23182         }
23183         result.bitField0_ = to_bitField0_;
23184         onBuilt();
23185         return result;
23186       }
23187 
23188       public Builder mergeFrom(com.google.protobuf.Message other) {
23189         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) {
23190           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)other);
23191         } else {
23192           super.mergeFrom(other);
23193           return this;
23194         }
23195       }
23196 
23197       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other) {
23198         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) return this;
23199         if (other.hasValue()) {
23200           mergeValue(other.getValue());
23201         }
23202         this.mergeUnknownFields(other.getUnknownFields());
23203         return this;
23204       }
23205 
23206       public final boolean isInitialized() {
23207         if (hasValue()) {
23208           if (!getValue().isInitialized()) {
23209             
23210             return false;
23211           }
23212         }
23213         return true;
23214       }
23215 
23216       public Builder mergeFrom(
23217           com.google.protobuf.CodedInputStream input,
23218           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23219           throws java.io.IOException {
23220         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parsedMessage = null;
23221         try {
23222           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
23223         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23224           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) e.getUnfinishedMessage();
23225           throw e;
23226         } finally {
23227           if (parsedMessage != null) {
23228             mergeFrom(parsedMessage);
23229           }
23230         }
23231         return this;
23232       }
23233       private int bitField0_;
23234 
23235       // optional .NameBytesPair value = 1;
23236       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
23237       private com.google.protobuf.SingleFieldBuilder<
23238           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
23239       /**
23240        * <code>optional .NameBytesPair value = 1;</code>
23241        */
23242       public boolean hasValue() {
23243         return ((bitField0_ & 0x00000001) == 0x00000001);
23244       }
23245       /**
23246        * <code>optional .NameBytesPair value = 1;</code>
23247        */
      // --- Accessors for the optional `NameBytesPair value = 1` field (presence bit 0x1
      // of bitField0_). Standard protobuf-2.x SingleFieldBuilder pattern: the plain field
      // `value_` holds state until a nested builder is requested, after which
      // `valueBuilder_` becomes the single source of truth.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
        if (valueBuilder_ == null) {
          return value_;
        } else {
          return valueBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (valueBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          value_ = value;
          onChanged();
        } else {
          valueBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      public Builder setValue(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (valueBuilder_ == null) {
          value_ = builderForValue.build();
          onChanged();
        } else {
          valueBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      // Merges into the existing message when the field is already set and non-default;
      // otherwise replaces it outright (proto2 singular-message merge semantics).
      public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (valueBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
            value_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
          } else {
            value_ = value;
          }
          onChanged();
        } else {
          valueBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      public Builder clearValue() {
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
          onChanged();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      // Marks the field as set and hands out a mutable nested builder for in-place edits.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getValueFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
        if (valueBuilder_ != null) {
          return valueBuilder_.getMessageOrBuilder();
        } else {
          return value_;
        }
      }
      /**
       * <code>optional .NameBytesPair value = 1;</code>
       */
      // Lazily creates the SingleFieldBuilder; once created, value_ is nulled and the
      // builder exclusively owns the field state (see getValue above).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
          getValueFieldBuilder() {
        if (valueBuilder_ == null) {
          valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
                  value_,
                  getParentForChildren(),
                  isClean());
          value_ = null;
        }
        return valueBuilder_;
      }
23351 
23352       // @@protoc_insertion_point(builder_scope:CoprocessorServiceResult)
23353     }
23354 
    // Eagerly create and freeze the singleton default instance for
    // CoprocessorServiceResult (the `true` argument selects the noInit constructor).
    static {
      defaultInstance = new CoprocessorServiceResult(true);
      defaultInstance.initFields();
    }
23359 
23360     // @@protoc_insertion_point(class_scope:CoprocessorServiceResult)
23361   }
23362 
  /**
   * Read-only view over a {@code CoprocessorServiceRequest} message or its builder:
   * a coprocessor invocation targeted at a single region, pairing the region
   * specifier (field 1) with the service call payload (field 2). Both fields are
   * required by the proto definition.
   */
  public interface CoprocessorServiceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // required .CoprocessorServiceCall call = 2;
    /**
     * <code>required .CoprocessorServiceCall call = 2;</code>
     */
    boolean hasCall();
    /**
     * <code>required .CoprocessorServiceCall call = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall();
    /**
     * <code>required .CoprocessorServiceCall call = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder();
  }
23394   /**
23395    * Protobuf type {@code CoprocessorServiceRequest}
23396    */
23397   public static final class CoprocessorServiceRequest extends
23398       com.google.protobuf.GeneratedMessage
23399       implements CoprocessorServiceRequestOrBuilder {
    // Use CoprocessorServiceRequest.newBuilder() to construct.
    private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only by the static initializer to create the singleton
    // default instance without touching any fields.
    private CoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Immutable singleton returned for the "empty" message; created in the static block.
    private static final CoprocessorServiceRequest defaultInstance;
    public static CoprocessorServiceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public CoprocessorServiceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized during parsing are preserved here and round-tripped.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: consumes the stream until tag 0 (end of message),
    // merging known fields (region = 1, call = 2) and preserving everything else in
    // unknownFields. On failure the partially-built message is attached to the exception.
    private CoprocessorServiceRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1 (region), wire type 2 (length-delimited). If the field
              // was already seen, merge the new occurrence into the previous one.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Tag 18 = field 2 (call), wire type 2 (length-delimited).
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = call_.toBuilder();
              }
              call_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(call_);
                call_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was parsed, even on error, so the unfinished message is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / reflection plumbing, backed by statics initialized elsewhere in ClientProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
    }

    // Stateless shared parser; delegates to the wire-parsing constructor above.
    public static com.google.protobuf.Parser<CoprocessorServiceRequest> PARSER =
        new com.google.protobuf.AbstractParser<CoprocessorServiceRequest>() {
      public CoprocessorServiceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CoprocessorServiceRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CoprocessorServiceRequest> getParserForType() {
      return PARSER;
    }
23509 
    // Presence bits: 0x1 = region set, 0x2 = call set.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // required .CoprocessorServiceCall call = 2;
    public static final int CALL_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_;
    /**
     * <code>required .CoprocessorServiceCall call = 2;</code>
     */
    public boolean hasCall() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .CoprocessorServiceCall call = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
      return call_;
    }
    /**
     * <code>required .CoprocessorServiceCall call = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
      return call_;
    }
23554 
    // Resets both message fields to their type default instances (never null).
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
    }
    // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Both fields are `required`, and each must itself be fully initialized.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCall()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getCall().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
23583 
    // Serializes set fields in field-number order, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure memoized sizes are computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, call_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, call_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
23614 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's proxy-based replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
23621 
    // Field-by-field structural equality: presence flags, field values, unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasCall() == other.hasCall());
      if (hasCall()) {
        result = result && getCall()
            .equals(other.getCall());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasCall()) {
        hash = (37 * hash) + CALL_FIELD_NUMBER;
        hash = (53 * hash) + getCall().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
23668 
    // Static parse entry points for every supported input form (ByteString, byte[],
    // InputStream, CodedInputStream; delimited and non-delimited; with or without an
    // extension registry). All delegate to the shared PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
23721 
    // Builder factory methods: fresh builder, builder seeded from a prototype, and
    // builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder attached to a parent for nested-builder change propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
23735     /**
23736      * Protobuf type {@code CoprocessorServiceRequest}
23737      */
23738     public static final class Builder extends
23739         com.google.protobuf.GeneratedMessage.Builder<Builder>
23740        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder {
      // Builder-side descriptor/reflection plumbing (mirrors the message class).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates nested field builders when the runtime requires it (reflection mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getCallFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
23772 
      // Resets both fields to defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (callBuilder_ == null) {
          call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
        } else {
          callBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      // Deep copy via round-trip through a partially-built message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
23793 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
      }

      // Builds and validates; throws UninitializedMessageException if a required
      // field (region or call) is missing. Use buildPartial() to skip validation.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
23810 
      // Copies builder state into a new message without required-field validation,
      // translating builder presence bits into the message's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (callBuilder_ == null) {
          result.call_ = call_;
        } else {
          result.call_ = callBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
23835 
      // Generic merge: dispatches to the typed overload when possible, otherwise
      // falls back to reflection-based merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: only fields set on `other` overwrite/merge into this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasCall()) {
          mergeCall(other.getCall());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Both fields are required and must themselves be initialized.
      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!hasCall()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        if (!getCall().isInitialized()) {
          
          return false;
        }
        return true;
      }
23876 
      // Stream merge: on parse failure, any successfully parsed prefix (attached to the
      // exception as the unfinished message) is still merged in before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bits: 0x1 = region, 0x2 = call.
      private int bitField0_;
23895 
      // required .RegionSpecifier region = 1;
      // Accessors for the required `region` field (presence bit 0x1). Same lazy
      // SingleFieldBuilder pattern as other message fields: `region_` holds state
      // until a nested builder is created, after which regionBuilder_ owns it.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      // Merges into the existing value when already set and non-default; else replaces.
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      // Lazily creates the SingleFieldBuilder; region_ is nulled once it exists.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }
24012 
24013       // required .CoprocessorServiceCall call = 2;
24014       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
24015       private com.google.protobuf.SingleFieldBuilder<
24016           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_;
24017       /**
24018        * <code>required .CoprocessorServiceCall call = 2;</code>
24019        */
24020       public boolean hasCall() {
24021         return ((bitField0_ & 0x00000002) == 0x00000002);
24022       }
24023       /**
24024        * <code>required .CoprocessorServiceCall call = 2;</code>
24025        */
24026       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
24027         if (callBuilder_ == null) {
24028           return call_;
24029         } else {
24030           return callBuilder_.getMessage();
24031         }
24032       }
24033       /**
24034        * <code>required .CoprocessorServiceCall call = 2;</code>
24035        */
24036       public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
24037         if (callBuilder_ == null) {
24038           if (value == null) {
24039             throw new NullPointerException();
24040           }
24041           call_ = value;
24042           onChanged();
24043         } else {
24044           callBuilder_.setMessage(value);
24045         }
24046         bitField0_ |= 0x00000002;
24047         return this;
24048       }
24049       /**
24050        * <code>required .CoprocessorServiceCall call = 2;</code>
24051        */
24052       public Builder setCall(
24053           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
24054         if (callBuilder_ == null) {
24055           call_ = builderForValue.build();
24056           onChanged();
24057         } else {
24058           callBuilder_.setMessage(builderForValue.build());
24059         }
24060         bitField0_ |= 0x00000002;
24061         return this;
24062       }
24063       /**
24064        * <code>required .CoprocessorServiceCall call = 2;</code>
24065        */
24066       public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
24067         if (callBuilder_ == null) {
24068           if (((bitField0_ & 0x00000002) == 0x00000002) &&
24069               call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
24070             call_ =
24071               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial();
24072           } else {
24073             call_ = value;
24074           }
24075           onChanged();
24076         } else {
24077           callBuilder_.mergeFrom(value);
24078         }
24079         bitField0_ |= 0x00000002;
24080         return this;
24081       }
24082       /**
24083        * <code>required .CoprocessorServiceCall call = 2;</code>
24084        */
24085       public Builder clearCall() {
24086         if (callBuilder_ == null) {
24087           call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
24088           onChanged();
24089         } else {
24090           callBuilder_.clear();
24091         }
24092         bitField0_ = (bitField0_ & ~0x00000002);
24093         return this;
24094       }
24095       /**
24096        * <code>required .CoprocessorServiceCall call = 2;</code>
24097        */
24098       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() {
24099         bitField0_ |= 0x00000002;
24100         onChanged();
24101         return getCallFieldBuilder().getBuilder();
24102       }
24103       /**
24104        * <code>required .CoprocessorServiceCall call = 2;</code>
24105        */
24106       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
24107         if (callBuilder_ != null) {
24108           return callBuilder_.getMessageOrBuilder();
24109         } else {
24110           return call_;
24111         }
24112       }
24113       /**
24114        * <code>required .CoprocessorServiceCall call = 2;</code>
24115        */
24116       private com.google.protobuf.SingleFieldBuilder<
24117           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> 
24118           getCallFieldBuilder() {
24119         if (callBuilder_ == null) {
24120           callBuilder_ = new com.google.protobuf.SingleFieldBuilder<
24121               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
24122                   call_,
24123                   getParentForChildren(),
24124                   isClean());
24125           call_ = null;
24126         }
24127         return callBuilder_;
24128       }
24129 
24130       // @@protoc_insertion_point(builder_scope:CoprocessorServiceRequest)
24131     }
24132 
    // Eagerly build the shared default instance; initFields() fills each
    // field with its type's default so getDefaultInstance() is fully usable.
    static {
      defaultInstance = new CoprocessorServiceRequest(true);
      defaultInstance.initFields();
    }
24137 
24138     // @@protoc_insertion_point(class_scope:CoprocessorServiceRequest)
24139   }
24140 
  // Read-only view shared by CoprocessorServiceResponse and its Builder.
  // Generated by protoc; do not hand-edit — regenerate from Client.proto.
  public interface CoprocessorServiceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // required .NameBytesPair value = 2;
    /**
     * <code>required .NameBytesPair value = 2;</code>
     */
    boolean hasValue();
    /**
     * <code>required .NameBytesPair value = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
    /**
     * <code>required .NameBytesPair value = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
  }
24172   /**
24173    * Protobuf type {@code CoprocessorServiceResponse}
24174    */
24175   public static final class CoprocessorServiceResponse extends
24176       com.google.protobuf.GeneratedMessage
24177       implements CoprocessorServiceResponseOrBuilder {
    // Use CoprocessorServiceResponse.newBuilder() to construct.
    private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor is used only by the static initializer to create the
    // shared default instance; fields are populated via initFields() there.
    private CoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Immutable singleton returned for absent message fields and by parsers.
    private static final CoprocessorServiceResponse defaultInstance;
    public static CoprocessorServiceResponse getDefaultInstance() {
      return defaultInstance;
    }

    public CoprocessorServiceResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields received on the wire that this (old) schema does not know about.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0) or an unrecognized field ends the loop.  Note the 'default:'
    // label lexically precedes the field cases; Java switch dispatch is
    // unaffected by case order, so this is intentional generated output.
    private CoprocessorServiceResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1, wire type 2 (length-delimited): region
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field appeared more than once: merge per proto2 semantics.
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {  // field 2, wire type 2 (length-delimited): value
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = value_.toBuilder();
              }
              value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(value_);
                value_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze unknown fields even on failure so the partially-parsed
        // message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table registered at
    // the bottom of this generated file.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
    }
24272 
24273     public static com.google.protobuf.Parser<CoprocessorServiceResponse> PARSER =
24274         new com.google.protobuf.AbstractParser<CoprocessorServiceResponse>() {
24275       public CoprocessorServiceResponse parsePartialFrom(
24276           com.google.protobuf.CodedInputStream input,
24277           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24278           throws com.google.protobuf.InvalidProtocolBufferException {
24279         return new CoprocessorServiceResponse(input, extensionRegistry);
24280       }
24281     };
24282 
24283     @java.lang.Override
24284     public com.google.protobuf.Parser<CoprocessorServiceResponse> getParserForType() {
24285       return PARSER;
24286     }
24287 
24288     private int bitField0_;
24289     // required .RegionSpecifier region = 1;
24290     public static final int REGION_FIELD_NUMBER = 1;
24291     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
24292     /**
24293      * <code>required .RegionSpecifier region = 1;</code>
24294      */
24295     public boolean hasRegion() {
24296       return ((bitField0_ & 0x00000001) == 0x00000001);
24297     }
24298     /**
24299      * <code>required .RegionSpecifier region = 1;</code>
24300      */
24301     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
24302       return region_;
24303     }
24304     /**
24305      * <code>required .RegionSpecifier region = 1;</code>
24306      */
24307     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
24308       return region_;
24309     }
24310 
24311     // required .NameBytesPair value = 2;
24312     public static final int VALUE_FIELD_NUMBER = 2;
24313     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
24314     /**
24315      * <code>required .NameBytesPair value = 2;</code>
24316      */
24317     public boolean hasValue() {
24318       return ((bitField0_ & 0x00000002) == 0x00000002);
24319     }
24320     /**
24321      * <code>required .NameBytesPair value = 2;</code>
24322      */
24323     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
24324       return value_;
24325     }
24326     /**
24327      * <code>required .NameBytesPair value = 2;</code>
24328      */
24329     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
24330       return value_;
24331     }
24332 
24333     private void initFields() {
24334       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24335       value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24336     }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    // True only when both required fields are present and themselves
    // fully initialized (proto2 'required' semantics).
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasValue()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getValue().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
24361 
    // Serializes only fields whose has-bit is set, then unknown fields.
    // getSerializedSize() is called first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    // -1 marks "not yet computed"; the message is immutable so one
    // computation suffices.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
24392 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; GeneratedMessage substitutes a proto-encoded
    // proxy so messages survive Java serialization.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
24399 
    // Value equality: same presence bits, equal field values, and equal
    // unknown-field sets.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasValue() == other.hasValue());
      if (hasValue()) {
        result = result && getValue()
            .equals(other.getValue());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 doubles as the "not yet computed" sentinel; a legitimately zero
    // hash is simply recomputed each call.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
24446 
    // Convenience parse entry points; all delegate to the PARSER singleton.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
24499 
    // Builder factory methods; toBuilder() seeds a new builder from this
    // message via mergeFrom.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
24513     /**
24514      * Protobuf type {@code CoprocessorServiceResponse}
24515      */
24516     public static final class Builder extends
24517         com.google.protobuf.GeneratedMessage.Builder<Builder>
24518        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // When nested builders are always used (reflection path), eagerly
      // create the sub-field builders so change notifications propagate.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getValueFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
24550 
      // Resets both fields to defaults and clears their has-bits
      // (0x00000001 = region, 0x00000002 = value).
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      // Deep copy by round-tripping through a partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
24571 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
      }

      // build() enforces required fields; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies fields (from plain field or sub-builder, whichever is live)
      // and translates builder has-bits into the message's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (valueBuilder_ == null) {
          result.value_ = value_;
        } else {
          result.value_ = valueBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
24613 
      // Typed dispatch: fast path for same-type messages, reflective merge
      // (via super) otherwise.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) {
        // Merging the default instance is a no-op by definition.
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasValue()) {
          mergeValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Builder-side required-field check; mirrors the message's
      // isInitialized() but without memoization.
      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!hasValue()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        if (!getValue().isInitialized()) {
          
          return false;
        }
        return true;
      }
24654 
      // Parses from the stream and merges into this builder.  On parse
      // failure, whatever was successfully read (the exception's unfinished
      // message) is still merged in before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder has-bits: 0x00000001 = region, 0x00000002 = value.
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      // Dual representation: plain field until getRegionFieldBuilder() is
      // called, SingleFieldBuilder afterwards (exactly one is live).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
24724       /**
24725        * <code>required .RegionSpecifier region = 1;</code>
24726        */
24727       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
24728         if (regionBuilder_ == null) {
24729           if (((bitField0_ & 0x00000001) == 0x00000001) &&
24730               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
24731             region_ =
24732               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
24733           } else {
24734             region_ = value;
24735           }
24736           onChanged();
24737         } else {
24738           regionBuilder_.mergeFrom(value);
24739         }
24740         bitField0_ |= 0x00000001;
24741         return this;
24742       }
24743       /**
24744        * <code>required .RegionSpecifier region = 1;</code>
24745        */
24746       public Builder clearRegion() {
24747         if (regionBuilder_ == null) {
24748           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24749           onChanged();
24750         } else {
24751           regionBuilder_.clear();
24752         }
24753         bitField0_ = (bitField0_ & ~0x00000001);
24754         return this;
24755       }
24756       /**
24757        * <code>required .RegionSpecifier region = 1;</code>
24758        */
24759       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
24760         bitField0_ |= 0x00000001;
24761         onChanged();
24762         return getRegionFieldBuilder().getBuilder();
24763       }
24764       /**
24765        * <code>required .RegionSpecifier region = 1;</code>
24766        */
24767       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
24768         if (regionBuilder_ != null) {
24769           return regionBuilder_.getMessageOrBuilder();
24770         } else {
24771           return region_;
24772         }
24773       }
24774       /**
24775        * <code>required .RegionSpecifier region = 1;</code>
24776        */
24777       private com.google.protobuf.SingleFieldBuilder<
24778           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
24779           getRegionFieldBuilder() {
24780         if (regionBuilder_ == null) {
24781           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
24782               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
24783                   region_,
24784                   getParentForChildren(),
24785                   isClean());
24786           region_ = null;
24787         }
24788         return regionBuilder_;
24789       }
24790 
      // required .NameBytesPair value = 2;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
      // Lazily created nested builder.  While non-null it owns the field's
      // value and value_ is unused; see getValueFieldBuilder().
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
        if (valueBuilder_ == null) {
          return value_;
        } else {
          return valueBuilder_.getMessage();
        }
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (valueBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          value_ = value;
          onChanged();
        } else {
          valueBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public Builder setValue(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (valueBuilder_ == null) {
          value_ = builderForValue.build();
          onChanged();
        } else {
          valueBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (valueBuilder_ == null) {
          // Merge field-by-field only when a non-default value was already
          // set; otherwise simply adopt the incoming message.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
            value_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
          } else {
            value_ = value;
          }
          onChanged();
        } else {
          valueBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public Builder clearValue() {
        if (valueBuilder_ == null) {
          value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
          onChanged();
        } else {
          valueBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getValueFieldBuilder().getBuilder();
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
        if (valueBuilder_ != null) {
          return valueBuilder_.getMessageOrBuilder();
        } else {
          return value_;
        }
      }
      /**
       * <code>required .NameBytesPair value = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
          getValueFieldBuilder() {
        if (valueBuilder_ == null) {
          valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
                  value_,
                  getParentForChildren(),
                  isClean());
          // Ownership of the value moves to the nested builder; null out the
          // field reference so there is no stale copy.
          value_ = null;
        }
        return valueBuilder_;
      }
24907 
24908       // @@protoc_insertion_point(builder_scope:CoprocessorServiceResponse)
24909     }
24910 
    // Eagerly build the shared immutable default instance used by
    // getDefaultInstance() and as the sentinel for unset message fields.
    static {
      defaultInstance = new CoprocessorServiceResponse(true);
      defaultInstance.initFields();
    }
24915 
24916     // @@protoc_insertion_point(class_scope:CoprocessorServiceResponse)
24917   }
24918 
  /**
   * Read-only accessor interface implemented by both {@code Action} and
   * {@code Action.Builder}.  For each field it exposes a presence check
   * ({@code has*}) and a getter; message-typed fields additionally expose
   * an {@code *OrBuilder} view.
   */
  public interface ActionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 index = 1;
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi action, useful aligning
     * result with what was originally submitted.
     * </pre>
     */
    boolean hasIndex();
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi action, useful aligning
     * result with what was originally submitted.
     * </pre>
     */
    int getIndex();

    // optional .MutationProto mutation = 2;
    /**
     * <code>optional .MutationProto mutation = 2;</code>
     */
    boolean hasMutation();
    /**
     * <code>optional .MutationProto mutation = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
    /**
     * <code>optional .MutationProto mutation = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();

    // optional .Get get = 3;
    /**
     * <code>optional .Get get = 3;</code>
     */
    boolean hasGet();
    /**
     * <code>optional .Get get = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
    /**
     * <code>optional .Get get = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();

    // optional .CoprocessorServiceCall service_call = 4;
    /**
     * <code>optional .CoprocessorServiceCall service_call = 4;</code>
     */
    boolean hasServiceCall();
    /**
     * <code>optional .CoprocessorServiceCall service_call = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall();
    /**
     * <code>optional .CoprocessorServiceCall service_call = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder();
  }
24984   /**
24985    * Protobuf type {@code Action}
24986    *
24987    * <pre>
24988    * Either a Get or a Mutation
24989    * </pre>
24990    */
24991   public static final class Action extends
24992       com.google.protobuf.GeneratedMessage
24993       implements ActionOrBuilder {
    // Use Action.newBuilder() to construct.
    private Action(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance; fields
    // are populated separately via initFields() in the static initializer.
    private Action(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
25000 
    // Shared immutable default instance, assigned in the class's static
    // initializer.
    private static final Action defaultInstance;
    /** Returns the singleton default (all-fields-unset) instance. */
    public static Action getDefaultInstance() {
      return defaultInstance;
    }

    public Action getDefaultInstanceForType() {
      return defaultInstance;
    }
25009 
    // Fields read from the wire that this generated class does not know
    // about; preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF
     * (tag 0) or an unparseable unknown field.  Tag values encode
     * (field_number &lt;&lt; 3) | wire_type: 8 = index (varint), 18 = mutation,
     * 26 = get, 34 = service_call (all length-delimited messages).  Note the
     * default arm appears before the numbered cases; Java switch case order
     * does not affect dispatch.
     */
    private Action(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              index_ = input.readUInt32();
              break;
            }
            case 18: {
              // If the field was already seen, merge the new occurrence into
              // the existing message (last-one-wins per protobuf semantics).
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = mutation_.toBuilder();
              }
              mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(mutation_);
                mutation_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = get_.toBuilder();
              }
              get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(get_);
                get_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = serviceCall_.toBuilder();
              }
              serviceCall_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(serviceCall_);
                serviceCall_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever unknown fields were collected, even on
        // failure, so the unfinished message is as complete as possible.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for the {@code Action} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_descriptor;
    }

    // Maps descriptor fields to the generated accessors via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
    }
25106 
25107     public static com.google.protobuf.Parser<Action> PARSER =
25108         new com.google.protobuf.AbstractParser<Action>() {
25109       public Action parsePartialFrom(
25110           com.google.protobuf.CodedInputStream input,
25111           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25112           throws com.google.protobuf.InvalidProtocolBufferException {
25113         return new Action(input, extensionRegistry);
25114       }
25115     };
25116 
25117     @java.lang.Override
25118     public com.google.protobuf.Parser<Action> getParserForType() {
25119       return PARSER;
25120     }
25121 
    // Bit set of explicitly-populated fields: bit 0 = index, bit 1 = mutation,
    // bit 2 = get, bit 3 = service_call.
    private int bitField0_;
    // optional uint32 index = 1;
    public static final int INDEX_FIELD_NUMBER = 1;
    private int index_;
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi action, useful aligning
     * result with what was originally submitted.
     * </pre>
     */
    public boolean hasIndex() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi action, useful aligning
     * result with what was originally submitted.
     * </pre>
     */
    public int getIndex() {
      return index_;
    }
25148 
    // optional .MutationProto mutation = 2;
    public static final int MUTATION_FIELD_NUMBER = 2;
    // Never null after construction: initFields() sets it to the default
    // instance, so getMutation() is safe even when hasMutation() is false.
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
    /**
     * <code>optional .MutationProto mutation = 2;</code>
     */
    public boolean hasMutation() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .MutationProto mutation = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
      return mutation_;
    }
    /**
     * <code>optional .MutationProto mutation = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
      return mutation_;
    }
25170 
    // optional .Get get = 3;
    public static final int GET_FIELD_NUMBER = 3;
    // Never null after construction; defaults to Get.getDefaultInstance().
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
    /**
     * <code>optional .Get get = 3;</code>
     */
    public boolean hasGet() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .Get get = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
      return get_;
    }
    /**
     * <code>optional .Get get = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
      return get_;
    }
25192 
    // optional .CoprocessorServiceCall service_call = 4;
    public static final int SERVICE_CALL_FIELD_NUMBER = 4;
    // Never null after construction; defaults to
    // CoprocessorServiceCall.getDefaultInstance().
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_;
    /**
     * <code>optional .CoprocessorServiceCall service_call = 4;</code>
     */
    public boolean hasServiceCall() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .CoprocessorServiceCall service_call = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
      return serviceCall_;
    }
    /**
     * <code>optional .CoprocessorServiceCall service_call = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
      return serviceCall_;
    }
25214 
    // Resets every field to its protobuf default so message-typed getters
    // never return null.
    private void initFields() {
      index_ = 0;
      mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
      get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
      serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
    }
    // Memoized initialization check: -1 = not yet computed, 0 = false,
    // 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * An Action itself has no required fields, but any sub-message that is
     * present must itself be initialized (their required fields set).
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (hasMutation()) {
        if (!getMutation().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasGet()) {
        if (!getGet().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasServiceCall()) {
        if (!getServiceCall().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
25247 
    /**
     * Serializes the set fields (in field-number order) followed by any
     * unknown fields.  getSerializedSize() is invoked first so memoized
     * sizes are available when writing nested messages.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, index_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, mutation_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, get_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, serviceCall_);
      }
      getUnknownFields().writeTo(output);
    }
25265 
    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Computes (and caches) the serialized byte size of the set fields. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, index_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, mutation_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, get_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, serviceCall_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
25292 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement
    // object so messages serialize via their protobuf form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
25299 
    /**
     * Field-by-field equality: each field must agree on presence, and on
     * value when present; unknown fields must also match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) obj;

      boolean result = true;
      result = result && (hasIndex() == other.hasIndex());
      if (hasIndex()) {
        result = result && (getIndex()
            == other.getIndex());
      }
      result = result && (hasMutation() == other.hasMutation());
      if (hasMutation()) {
        result = result && getMutation()
            .equals(other.getMutation());
      }
      result = result && (hasGet() == other.hasGet());
      if (hasGet()) {
        result = result && getGet()
            .equals(other.getGet());
      }
      result = result && (hasServiceCall() == other.hasServiceCall());
      if (hasServiceCall()) {
        result = result && getServiceCall()
            .equals(other.getServiceCall());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
25335 
    // Memoized hash; 0 means not yet computed (a computed hash of exactly 0
    // would be recomputed each call, which is harmless).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes the descriptor, each present
     * field (tagged by its field number), and the unknown fields.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIndex()) {
        hash = (37 * hash) + INDEX_FIELD_NUMBER;
        hash = (53 * hash) + getIndex();
      }
      if (hasMutation()) {
        hash = (37 * hash) + MUTATION_FIELD_NUMBER;
        hash = (53 * hash) + getMutation().hashCode();
      }
      if (hasGet()) {
        hash = (37 * hash) + GET_FIELD_NUMBER;
        hash = (53 * hash) + getGet().hashCode();
      }
      if (hasServiceCall()) {
        hash = (37 * hash) + SERVICE_CALL_FIELD_NUMBER;
        hash = (53 * hash) + getServiceCall().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
25364 
    // Convenience parse entry points; all delegate to PARSER.  The
    // parseDelimitedFrom variants read a varint length prefix first.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
25417 
    /** Creates a new, empty builder for {@code Action}. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
25424 
    // Creates a child builder wired to the given parent for change
    // notification (used by nested SingleFieldBuilders).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
25431     /**
25432      * Protobuf type {@code Action}
25433      *
25434      * <pre>
25435      * Either a Get or a Mutation
25436      * </pre>
25437      */
    // NOTE(review): protoc-generated builder for Action — do not hand-edit; regenerate from Client.proto.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builders when the runtime asks for it
      // (alwaysUseFieldBuilders); otherwise they are created lazily on first access.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getMutationFieldBuilder();
          getGetFieldBuilder();
          getServiceCallFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits in bitField0_.
      public Builder clear() {
        super.clear();
        index_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (mutationBuilder_ == null) {
          mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
        } else {
          mutationBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (getBuilder_ == null) {
          get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (serviceCallBuilder_ == null) {
          serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
        } else {
          serviceCallBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages whose (transitive) required fields are unset.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Snapshots the builder into an immutable Action, copying the presence bits
      // (bitField0_) field by field; nested messages come from either the plain
      // field or its sub-builder, whichever is active.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.index_ = index_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (mutationBuilder_ == null) {
          result.mutation_ = mutation_;
        } else {
          result.mutation_ = mutationBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (getBuilder_ == null) {
          result.get_ = get_;
        } else {
          result.get_ = getBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (serviceCallBuilder_ == null) {
          result.serviceCall_ = serviceCall_;
        } else {
          result.serviceCall_ = serviceCallBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields set on `other` overwrite/merge into this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance()) return this;
        if (other.hasIndex()) {
          setIndex(other.getIndex());
        }
        if (other.hasMutation()) {
          mergeMutation(other.getMutation());
        }
        if (other.hasGet()) {
          mergeGet(other.getGet());
        }
        if (other.hasServiceCall()) {
          mergeServiceCall(other.getServiceCall());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // All of Action's own fields are optional; initialization only fails when a
      // nested message is present but itself uninitialized (the empty if-bodies are
      // where protoc would emit a debug error string in other generation modes).
      public final boolean isInitialized() {
        if (hasMutation()) {
          if (!getMutation().isInitialized()) {
            
            return false;
          }
        }
        if (hasGet()) {
          if (!getGet().isInitialized()) {
            
            return false;
          }
        }
        if (hasServiceCall()) {
          if (!getServiceCall().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      // Parses from the stream and merges the result in; on a parse failure the
      // partially-parsed message is still merged (finally block) before rethrowing,
      // so previously-read fields are not lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap: bit 0 = index, bit 1 = mutation, bit 2 = get, bit 3 = service_call.
      private int bitField0_;

      // optional uint32 index = 1;
      private int index_ ;
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi action, useful aligning
       * result with what was originally submitted.
       * </pre>
       */
      public boolean hasIndex() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi action, useful aligning
       * result with what was originally submitted.
       * </pre>
       */
      public int getIndex() {
        return index_;
      }
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi action, useful aligning
       * result with what was originally submitted.
       * </pre>
       */
      public Builder setIndex(int value) {
        bitField0_ |= 0x00000001;
        index_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi action, useful aligning
       * result with what was originally submitted.
       * </pre>
       */
      public Builder clearIndex() {
        bitField0_ = (bitField0_ & ~0x00000001);
        index_ = 0;
        onChanged();
        return this;
      }

      // optional .MutationProto mutation = 2;
      // Either mutation_ (plain value) or mutationBuilder_ (sub-builder) is authoritative,
      // never both: once the field builder exists, mutation_ is nulled out.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public boolean hasMutation() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
        if (mutationBuilder_ == null) {
          return mutation_;
        } else {
          return mutationBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
        if (mutationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          mutation_ = value;
          onChanged();
        } else {
          mutationBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public Builder setMutation(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
        if (mutationBuilder_ == null) {
          mutation_ = builderForValue.build();
          onChanged();
        } else {
          mutationBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
        if (mutationBuilder_ == null) {
          // Merge only if a non-default value is already present; otherwise just adopt `value`.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
            mutation_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
          } else {
            mutation_ = value;
          }
          onChanged();
        } else {
          mutationBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public Builder clearMutation() {
        if (mutationBuilder_ == null) {
          mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
          onChanged();
        } else {
          mutationBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getMutationFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
        if (mutationBuilder_ != null) {
          return mutationBuilder_.getMessageOrBuilder();
        } else {
          return mutation_;
        }
      }
      /**
       * <code>optional .MutationProto mutation = 2;</code>
       */
      // Lazily creates the SingleFieldBuilder, seeding it with the current mutation_
      // value and then handing ownership of the field over to the builder.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> 
          getMutationFieldBuilder() {
        if (mutationBuilder_ == null) {
          mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
                  mutation_,
                  getParentForChildren(),
                  isClean());
          mutation_ = null;
        }
        return mutationBuilder_;
      }

      // optional .Get get = 3;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
      /**
       * <code>optional .Get get = 3;</code>
       */
      public boolean hasGet() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
        if (getBuilder_ == null) {
          return get_;
        } else {
          return getBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          get_ = value;
          onChanged();
        } else {
          getBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public Builder setGet(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
        if (getBuilder_ == null) {
          get_ = builderForValue.build();
          onChanged();
        } else {
          getBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
            get_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
          } else {
            get_ = value;
          }
          onChanged();
        } else {
          getBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public Builder clearGet() {
        if (getBuilder_ == null) {
          get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
          onChanged();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getGetFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
        if (getBuilder_ != null) {
          return getBuilder_.getMessageOrBuilder();
        } else {
          return get_;
        }
      }
      /**
       * <code>optional .Get get = 3;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> 
          getGetFieldBuilder() {
        if (getBuilder_ == null) {
          getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
                  get_,
                  getParentForChildren(),
                  isClean());
          get_ = null;
        }
        return getBuilder_;
      }

      // optional .CoprocessorServiceCall service_call = 4;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> serviceCallBuilder_;
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public boolean hasServiceCall() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
        if (serviceCallBuilder_ == null) {
          return serviceCall_;
        } else {
          return serviceCallBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public Builder setServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
        if (serviceCallBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          serviceCall_ = value;
          onChanged();
        } else {
          serviceCallBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public Builder setServiceCall(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
        if (serviceCallBuilder_ == null) {
          serviceCall_ = builderForValue.build();
          onChanged();
        } else {
          serviceCallBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public Builder mergeServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
        if (serviceCallBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              serviceCall_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
            serviceCall_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(serviceCall_).mergeFrom(value).buildPartial();
          } else {
            serviceCall_ = value;
          }
          onChanged();
        } else {
          serviceCallBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public Builder clearServiceCall() {
        if (serviceCallBuilder_ == null) {
          serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
          onChanged();
        } else {
          serviceCallBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getServiceCallBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getServiceCallFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
        if (serviceCallBuilder_ != null) {
          return serviceCallBuilder_.getMessageOrBuilder();
        } else {
          return serviceCall_;
        }
      }
      /**
       * <code>optional .CoprocessorServiceCall service_call = 4;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> 
          getServiceCallFieldBuilder() {
        if (serviceCallBuilder_ == null) {
          serviceCallBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
                  serviceCall_,
                  getParentForChildren(),
                  isClean());
          serviceCall_ = null;
        }
        return serviceCallBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:Action)
    }
26031 
    // Builds the singleton default instance via the no-parse constructor,
    // then sets every field to its proto default.
    static {
      defaultInstance = new Action(true);
      defaultInstance.initFields();
    }
26036 
26037     // @@protoc_insertion_point(class_scope:Action)
26038   }
26039 
  /** Read accessors shared by {@code RegionAction} and {@code RegionAction.Builder}. */
  public interface RegionActionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional bool atomic = 2;
    /**
     * <code>optional bool atomic = 2;</code>
     *
     * <pre>
     * When set, run mutations as atomic unit.
     * </pre>
     */
    boolean hasAtomic();
    /**
     * <code>optional bool atomic = 2;</code>
     *
     * <pre>
     * When set, run mutations as atomic unit.
     * </pre>
     */
    boolean getAtomic();

    // repeated .Action action = 3;
    /**
     * <code>repeated .Action action = 3;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> 
        getActionList();
    /**
     * <code>repeated .Action action = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index);
    /**
     * <code>repeated .Action action = 3;</code>
     */
    int getActionCount();
    /**
     * <code>repeated .Action action = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
        getActionOrBuilderList();
    /**
     * <code>repeated .Action action = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
        int index);
  }
26100   /**
26101    * Protobuf type {@code RegionAction}
26102    *
26103    * <pre>
26104    **
26105    * Actions to run against a Region.
26106    * </pre>
26107    */
26108   public static final class RegionAction extends
26109       com.google.protobuf.GeneratedMessage
26110       implements RegionActionOrBuilder {
    // Use RegionAction.newBuilder() to construct.
    // Private: instances are created only through the Builder; carries over the
    // builder's unknown-field set.
    private RegionAction(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
26116     private RegionAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
26117 
    // Singleton default instance, created in the class's static initializer.
    private static final RegionAction defaultInstance;
    public static RegionAction getDefaultInstance() {
      return defaultInstance;
    }
26122 
    /** Generic-API accessor for the type's default instance. */
    public RegionAction getDefaultInstanceForType() {
      return defaultInstance;
    }
26126 
    // Fields read from the wire that this schema version does not know about;
    // preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
    // or an unrecognized field ends the loop. Tag = (field_number << 3) | wire_type,
    // so 10 = field 1 (region, length-delimited), 16 = field 2 (atomic, varint),
    // 26 = field 3 (action, length-delimited, repeated).
    private RegionAction(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the `default` label appearing before the `case` labels is legal
          // Java; label order has no effect on switch dispatch.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // If region was already seen, merge the new value into the old one
              // (last-message-merges semantics for duplicated singular messages).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              atomic_ = input.readBool();
              break;
            }
            case 26: {
              // Lazily allocate the repeated-action list on first element.
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>();
                mutable_bitField0_ |= 0x00000004;
              }
              action_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on failure: freeze the repeated list and the unknown-field set
        // so the (possibly unfinished) message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          action_ = java.util.Collections.unmodifiableList(action_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for the {@code RegionAction} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_descriptor;
    }

    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
    }
26208 
26209     public static com.google.protobuf.Parser<RegionAction> PARSER =
26210         new com.google.protobuf.AbstractParser<RegionAction>() {
26211       public RegionAction parsePartialFrom(
26212           com.google.protobuf.CodedInputStream input,
26213           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26214           throws com.google.protobuf.InvalidProtocolBufferException {
26215         return new RegionAction(input, extensionRegistry);
26216       }
26217     };
26218 
26219     @java.lang.Override
26220     public com.google.protobuf.Parser<RegionAction> getParserForType() {
26221       return PARSER;
26222     }
26223 
    // Presence bits for optional/required fields (bit 0x1 = region,
    // bit 0x2 = atomic).
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     *
     * Returns true when the region field was explicitly set.
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }
26246 
    // optional bool atomic = 2;
    public static final int ATOMIC_FIELD_NUMBER = 2;
    private boolean atomic_;
    /**
     * <code>optional bool atomic = 2;</code>
     *
     * <pre>
     * When set, run mutations as atomic unit.
     * </pre>
     *
     * Returns true when the atomic field was explicitly set.
     */
    public boolean hasAtomic() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool atomic = 2;</code>
     *
     * <pre>
     * When set, run mutations as atomic unit.
     * </pre>
     */
    public boolean getAtomic() {
      return atomic_;
    }
26270 
    // repeated .Action action = 3;
    public static final int ACTION_FIELD_NUMBER = 3;
    // Immutable after construction (unmodifiable list or emptyList).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_;
    /**
     * <code>repeated .Action action = 3;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
      return action_;
    }
    /**
     * <code>repeated .Action action = 3;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
        getActionOrBuilderList() {
      return action_;
    }
    /**
     * <code>repeated .Action action = 3;</code>
     */
    public int getActionCount() {
      return action_.size();
    }
    /**
     * <code>repeated .Action action = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
      return action_.get(index);
    }
    /**
     * <code>repeated .Action action = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
        int index) {
      return action_.get(index);
    }
26306 
    /** Resets all fields to their proto defaults; called before parsing. */
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      atomic_ = false;
      action_ = java.util.Collections.emptyList();
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * Returns true when all required fields (region, plus required fields of
     * every nested message) are set.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // Every repeated action element must itself be initialized.
      for (int i = 0; i < getActionCount(); i++) {
        if (!getAction(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
26334 
    /**
     * Serializes this message to {@code output} in field-number order,
     * followed by any unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces the memoized size computation needed by nested writes.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, atomic_);
      }
      for (int i = 0; i < action_.size(); i++) {
        output.writeMessage(3, action_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
26349 
    // Memoized wire size; -1 until first computed. Safe to cache because the
    // message is immutable after construction.
    private int memoizedSerializedSize = -1;
    /** Returns the number of bytes this message occupies when serialized. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, atomic_);
      }
      for (int i = 0; i < action_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, action_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
26372 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegates to GeneratedMessage, which replaces
    // the object with a proto-serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
26379 
    /**
     * Field-by-field equality: presence flags, field values, repeated actions,
     * and unknown fields must all match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasAtomic() == other.hasAtomic());
      if (hasAtomic()) {
        result = result && (getAtomic()
            == other.getAtomic());
      }
      result = result && getActionList()
          .equals(other.getActionList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
26407 
    // Memoized hash; 0 means "not yet computed" (a computed hash of exactly 0
    // is simply recomputed each call).
    private int memoizedHashCode = 0;
    /** Hash consistent with equals(): mixes descriptor, set fields, and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasAtomic()) {
        hash = (37 * hash) + ATOMIC_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getAtomic());
      }
      if (getActionCount() > 0) {
        hash = (37 * hash) + ACTION_FIELD_NUMBER;
        hash = (53 * hash) + getActionList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
26432 
    // Static parse entry points; all delegate to PARSER. The *Delimited
    // variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
26485 
    /** Creates a new, empty builder for RegionAction. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with the fields of {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Used by parent builders to create a child builder wired for change
    // notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
26499     /**
26500      * Protobuf type {@code RegionAction}
26501      *
26502      * <pre>
26503      **
26504      * Actions to run against a Region.
26505      * </pre>
26506      */
26507     public static final class Builder extends
26508         com.google.protobuf.GeneratedMessage.Builder<Builder>
26509        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder {
      /** Returns the protobuf descriptor for {@code RegionAction} (shared with the message class). */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_descriptor;
      }

      // Reflection support: maps descriptor fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
      }
26521 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it
      // (i.e. when this builder has a parent listening for changes).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getActionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
26541 
      /** Resets every field to its proto default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        atomic_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (actionBuilder_ == null) {
          action_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          actionBuilder_.clear();
        }
        return this;
      }

      /** Deep copy: builds a partial message from this builder and merges it into a fresh one. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
26564 
      /** Returns the descriptor for the message type this builder produces. */
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_descriptor;
      }

      /** Returns the default (empty) RegionAction instance. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance();
      }
26573 
      /**
       * Builds the message, throwing if any required field (region, or a
       * required field of a nested message) is unset.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds the message without checking required fields, copying builder
       * state (values, presence bits, repeated list) into the new instance.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.atomic_ = atomic_;
        if (actionBuilder_ == null) {
          // Freeze the list and hand ownership to the message; the builder
          // will copy-on-write (ensureActionIsMutable) if modified again.
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            action_ = java.util.Collections.unmodifiableList(action_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.action_ = action_;
        } else {
          result.action_ = actionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
26611 
      /** Dispatches to the typed merge when {@code other} is a RegionAction, else falls back to reflection-based merge. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges {@code other} into this builder: set singular fields overwrite
       * (region deep-merges), repeated actions are appended.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasAtomic()) {
          setAtomic(other.getAtomic());
        }
        if (actionBuilder_ == null) {
          if (!other.action_.isEmpty()) {
            if (action_.isEmpty()) {
              // Share other's immutable list; copy-on-write if later mutated.
              action_ = other.action_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureActionIsMutable();
              action_.addAll(other.action_);
            }
            onChanged();
          }
        } else {
          if (!other.action_.isEmpty()) {
            if (actionBuilder_.isEmpty()) {
              // Builder path: drop the empty field builder and adopt other's
              // list directly, re-creating the builder only if required.
              actionBuilder_.dispose();
              actionBuilder_ = null;
              action_ = other.action_;
              bitField0_ = (bitField0_ & ~0x00000004);
              actionBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getActionFieldBuilder() : null;
            } else {
              actionBuilder_.addAllMessages(other.action_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
26658 
      /**
       * Returns true when all required fields are set: region must be present
       * and initialized, and every action element must be initialized.
       */
      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        for (int i = 0; i < getActionCount(); i++) {
          if (!getAction(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
26676 
      /**
       * Parses from {@code input} and merges into this builder. On parse
       * failure, whatever was successfully read before the error is still
       * merged (via the finally block) before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 = region, 0x2 = atomic, 0x4 = action list is a
      // private mutable copy.
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      // Lazily-created nested builder; while non-null it owns the region value
      // and region_ is ignored.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Replaces the region value; rejects null.
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Field-merges {@code value} into an existing non-default region,
       * otherwise adopts it wholesale.
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Resets region to its default and clears its presence bit.
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Returns a mutable sub-builder; marks the field present because the
       * caller is assumed to populate it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Lazily creates the nested field builder and transfers ownership of the
       * current value into it (region_ is nulled once the builder owns it).
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }
26812 
      // optional bool atomic = 2;
      private boolean atomic_ ;
      /**
       * <code>optional bool atomic = 2;</code>
       *
       * <pre>
       * When set, run mutations as atomic unit.
       * </pre>
       */
      public boolean hasAtomic() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool atomic = 2;</code>
       *
       * <pre>
       * When set, run mutations as atomic unit.
       * </pre>
       */
      public boolean getAtomic() {
        return atomic_;
      }
      /**
       * <code>optional bool atomic = 2;</code>
       *
       * <pre>
       * When set, run mutations as atomic unit.
       * </pre>
       *
       * Sets the value and its presence bit.
       */
      public Builder setAtomic(boolean value) {
        bitField0_ |= 0x00000002;
        atomic_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool atomic = 2;</code>
       *
       * <pre>
       * When set, run mutations as atomic unit.
       * </pre>
       *
       * Resets to the proto default (false) and clears the presence bit.
       */
      public Builder clearAtomic() {
        bitField0_ = (bitField0_ & ~0x00000002);
        atomic_ = false;
        onChanged();
        return this;
      }
26861 
      // repeated .Action action = 3;
      // May alias an immutable list from a merged message until
      // ensureActionIsMutable() performs the copy-on-write.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_ =
        java.util.Collections.emptyList();
      // Copies action_ into a private ArrayList the first time it is mutated;
      // bit 0x4 records that the copy has been made.
      private void ensureActionIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>(action_);
          bitField0_ |= 0x00000004;
         }
      }

      // Lazily-created repeated field builder; while non-null it owns the
      // action list and action_ is ignored.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> actionBuilder_;
26874 
      /**
       * <code>repeated .Action action = 3;</code>
       *
       * Returns an unmodifiable view of the current action list.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
        if (actionBuilder_ == null) {
          return java.util.Collections.unmodifiableList(action_);
        } else {
          return actionBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .Action action = 3;</code>
       */
      public int getActionCount() {
        if (actionBuilder_ == null) {
          return action_.size();
        } else {
          return actionBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .Action action = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
        if (actionBuilder_ == null) {
          return action_.get(index);
        } else {
          return actionBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .Action action = 3;</code>
       *
       * Replaces the element at {@code index}; rejects null.
       */
      public Builder setAction(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
        if (actionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureActionIsMutable();
          action_.set(index, value);
          onChanged();
        } else {
          actionBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .Action action = 3;</code>
       */
      public Builder setAction(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
        if (actionBuilder_ == null) {
          ensureActionIsMutable();
          action_.set(index, builderForValue.build());
          onChanged();
        } else {
          actionBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .Action action = 3;</code>
       *
       * Appends {@code value}; rejects null.
       */
      public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
        if (actionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureActionIsMutable();
          action_.add(value);
          onChanged();
        } else {
          actionBuilder_.addMessage(value);
        }
        return this;
      }
26952       /**
26953        * <code>repeated .Action action = 3;</code>
26954        */
26955       public Builder addAction(
26956           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
26957         if (actionBuilder_ == null) {
26958           if (value == null) {
26959             throw new NullPointerException();
26960           }
26961           ensureActionIsMutable();
26962           action_.add(index, value);
26963           onChanged();
26964         } else {
26965           actionBuilder_.addMessage(index, value);
26966         }
26967         return this;
26968       }
26969       /**
26970        * <code>repeated .Action action = 3;</code>
26971        */
26972       public Builder addAction(
26973           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
26974         if (actionBuilder_ == null) {
26975           ensureActionIsMutable();
26976           action_.add(builderForValue.build());
26977           onChanged();
26978         } else {
26979           actionBuilder_.addMessage(builderForValue.build());
26980         }
26981         return this;
26982       }
26983       /**
26984        * <code>repeated .Action action = 3;</code>
26985        */
26986       public Builder addAction(
26987           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
26988         if (actionBuilder_ == null) {
26989           ensureActionIsMutable();
26990           action_.add(index, builderForValue.build());
26991           onChanged();
26992         } else {
26993           actionBuilder_.addMessage(index, builderForValue.build());
26994         }
26995         return this;
26996       }
26997       /**
26998        * <code>repeated .Action action = 3;</code>
26999        */
27000       public Builder addAllAction(
27001           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> values) {
27002         if (actionBuilder_ == null) {
27003           ensureActionIsMutable();
27004           super.addAll(values, action_);
27005           onChanged();
27006         } else {
27007           actionBuilder_.addAllMessages(values);
27008         }
27009         return this;
27010       }
27011       /**
27012        * <code>repeated .Action action = 3;</code>
27013        */
27014       public Builder clearAction() {
27015         if (actionBuilder_ == null) {
27016           action_ = java.util.Collections.emptyList();
27017           bitField0_ = (bitField0_ & ~0x00000004);
27018           onChanged();
27019         } else {
27020           actionBuilder_.clear();
27021         }
27022         return this;
27023       }
27024       /**
27025        * <code>repeated .Action action = 3;</code>
27026        */
27027       public Builder removeAction(int index) {
27028         if (actionBuilder_ == null) {
27029           ensureActionIsMutable();
27030           action_.remove(index);
27031           onChanged();
27032         } else {
27033           actionBuilder_.remove(index);
27034         }
27035         return this;
27036       }
27037       /**
27038        * <code>repeated .Action action = 3;</code>
27039        */
27040       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder getActionBuilder(
27041           int index) {
27042         return getActionFieldBuilder().getBuilder(index);
27043       }
27044       /**
27045        * <code>repeated .Action action = 3;</code>
27046        */
27047       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
27048           int index) {
27049         if (actionBuilder_ == null) {
27050           return action_.get(index);  } else {
27051           return actionBuilder_.getMessageOrBuilder(index);
27052         }
27053       }
27054       /**
27055        * <code>repeated .Action action = 3;</code>
27056        */
27057       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
27058            getActionOrBuilderList() {
27059         if (actionBuilder_ != null) {
27060           return actionBuilder_.getMessageOrBuilderList();
27061         } else {
27062           return java.util.Collections.unmodifiableList(action_);
27063         }
27064       }
27065       /**
27066        * <code>repeated .Action action = 3;</code>
27067        */
27068       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder() {
27069         return getActionFieldBuilder().addBuilder(
27070             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
27071       }
27072       /**
27073        * <code>repeated .Action action = 3;</code>
27074        */
27075       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder(
27076           int index) {
27077         return getActionFieldBuilder().addBuilder(
27078             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
27079       }
27080       /**
27081        * <code>repeated .Action action = 3;</code>
27082        */
27083       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder> 
27084            getActionBuilderList() {
27085         return getActionFieldBuilder().getBuilderList();
27086       }
      // Lazily creates the RepeatedFieldBuilder on first use, seeding it with
      // the current list contents and with whether this builder owned a
      // mutable copy (bit 0x00000004). Ownership of the data transfers to the
      // RepeatedFieldBuilder, so action_ is nulled out afterwards — from then
      // on every accessor must go through actionBuilder_.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> 
          getActionFieldBuilder() {
        if (actionBuilder_ == null) {
          actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>(
                  action_,
                  ((bitField0_ & 0x00000004) == 0x00000004),
                  getParentForChildren(),
                  isClean());
          action_ = null;
        }
        return actionBuilder_;
      }
27101 
27102       // @@protoc_insertion_point(builder_scope:RegionAction)
27103     }
27104 
    // Eagerly builds the singleton default instance for RegionAction;
    // initFields() resets every field to its proto-declared default.
    static {
      defaultInstance = new RegionAction(true);
      defaultInstance.initFields();
    }
27109 
27110     // @@protoc_insertion_point(class_scope:RegionAction)
27111   }
27112 
  // Read-only accessor contract shared by RegionLoadStats and its Builder.
  public interface RegionLoadStatsOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int32 memstoreLoad = 1 [default = 0];
    /**
     * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
     *
     * Whether the field was explicitly set (it is optional).
     *
     * <pre>
     * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
     * </pre>
     */
    boolean hasMemstoreLoad();
    /**
     * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
     *
     * Returns the field value, or the default (0) when unset.
     *
     * <pre>
     * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
     * </pre>
     */
    int getMemstoreLoad();

    // optional int32 heapOccupancy = 2 [default = 0];
    /**
     * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
     *
     * Whether the field was explicitly set (it is optional).
     *
     * <pre>
     * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
     * We can move this to "ServerLoadStats" should we develop them.
     * </pre>
     */
    boolean hasHeapOccupancy();
    /**
     * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
     *
     * Returns the field value, or the default (0) when unset.
     *
     * <pre>
     * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
     * We can move this to "ServerLoadStats" should we develop them.
     * </pre>
     */
    int getHeapOccupancy();
  }
27154   /**
27155    * Protobuf type {@code RegionLoadStats}
27156    *
27157    * <pre>
27158    *
27159    * Statistics about the current load on the region
27160    * </pre>
27161    */
27162   public static final class RegionLoadStats extends
27163       com.google.protobuf.GeneratedMessage
27164       implements RegionLoadStatsOrBuilder {
    // Use RegionLoadStats.newBuilder() to construct.
    // Builder-based constructor; adopts the builder's unknown fields.
    private RegionLoadStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Bare constructor used only for the singleton default instance; skips
    // builder machinery and installs an empty unknown-field set.
    private RegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in the class static initializer.
    private static final RegionLoadStats defaultInstance;
    public static RegionLoadStats getDefaultInstance() {
      return defaultInstance;
    }

    public RegionLoadStats getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not in this message's schema;
    // preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0) or an unparseable field ends the loop. Tag 8 = field 1 as a
    // varint (memstoreLoad); tag 16 = field 2 as a varint (heapOccupancy).
    // Note: Java switch cases are order-independent, so the generated
    // "default" arm appearing before case 8/16 is harmless.
    private RegionLoadStats(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unrecognized tag: stash it in unknownFields; a false return
              // means end-of-group, which also terminates the loop.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              memstoreLoad_ = input.readInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              heapOccupancy_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, freeze whatever was parsed so the partially-built
        // message attached to the exception is immutable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection descriptor for the RegionLoadStats message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
    }

    // Stateless parser; delegates straight to the parsing constructor above.
    public static com.google.protobuf.Parser<RegionLoadStats> PARSER =
        new com.google.protobuf.AbstractParser<RegionLoadStats>() {
      public RegionLoadStats parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionLoadStats(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionLoadStats> getParserForType() {
      return PARSER;
    }
27258 
    // Presence bits: 0x1 = memstoreLoad set, 0x2 = heapOccupancy set.
    private int bitField0_;
    // optional int32 memstoreLoad = 1 [default = 0];
    public static final int MEMSTORELOAD_FIELD_NUMBER = 1;
    private int memstoreLoad_;
    /**
     * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
     *
     * True when the field was explicitly set on the wire or by a builder.
     *
     * <pre>
     * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
     * </pre>
     */
    public boolean hasMemstoreLoad() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
     *
     * Field value; 0 (the proto default) when unset.
     *
     * <pre>
     * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
     * </pre>
     */
    public int getMemstoreLoad() {
      return memstoreLoad_;
    }

    // optional int32 heapOccupancy = 2 [default = 0];
    public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2;
    private int heapOccupancy_;
    /**
     * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
     *
     * True when the field was explicitly set on the wire or by a builder.
     *
     * <pre>
     * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
     * We can move this to "ServerLoadStats" should we develop them.
     * </pre>
     */
    public boolean hasHeapOccupancy() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
     *
     * Field value; 0 (the proto default) when unset.
     *
     * <pre>
     * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
     * We can move this to "ServerLoadStats" should we develop them.
     * </pre>
     */
    public int getHeapOccupancy() {
      return heapOccupancy_;
    }
27309 
    // Resets both fields to their proto-declared defaults.
    private void initFields() {
      memstoreLoad_ = 0;
      heapOccupancy_ = 0;
    }
    // Memoized initialization check: -1 = not yet computed, 1 = true.
    // This message has no required fields, so it is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
27322 
    // Serializes only the fields whose presence bits are set, then any
    // unknown fields. getSerializedSize() is called first for its side
    // effect of populating the memoized size the runtime may rely on.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, memstoreLoad_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, heapOccupancy_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, memstoreLoad_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, heapOccupancy_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
27353 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage, which
    // substitutes a serializable proxy for this message.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
27360 
27361     @java.lang.Override
27362     public boolean equals(final java.lang.Object obj) {
27363       if (obj == this) {
27364        return true;
27365       }
27366       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)) {
27367         return super.equals(obj);
27368       }
27369       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) obj;
27370 
27371       boolean result = true;
27372       result = result && (hasMemstoreLoad() == other.hasMemstoreLoad());
27373       if (hasMemstoreLoad()) {
27374         result = result && (getMemstoreLoad()
27375             == other.getMemstoreLoad());
27376       }
27377       result = result && (hasHeapOccupancy() == other.hasHeapOccupancy());
27378       if (hasHeapOccupancy()) {
27379         result = result && (getHeapOccupancy()
27380             == other.getHeapOccupancy());
27381       }
27382       result = result &&
27383           getUnknownFields().equals(other.getUnknownFields());
27384       return result;
27385     }
27386 
    // Memoized hash; 0 means "not yet computed". (If the real hash ever
    // equals 0 it is simply recomputed each call — a benign generated-code
    // quirk.)
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard generated mix: descriptor, then (field number, value) for
      // each present field, then unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMemstoreLoad()) {
        hash = (37 * hash) + MEMSTORELOAD_FIELD_NUMBER;
        hash = (53 * hash) + getMemstoreLoad();
      }
      if (hasHeapOccupancy()) {
        hash = (37 * hash) + HEAPOCCUPANCY_FIELD_NUMBER;
        hash = (53 * hash) + getHeapOccupancy();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
27407 
    // Convenience parse entry points; all delegate to PARSER, which invokes
    // the parsing constructor. The extensionRegistry variants matter only if
    // this message ever carries extensions.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
27460 
    // Builder factories: fresh, seeded from a prototype, or seeded from this
    // instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: creates a child builder wired to a parent for
    // change-notification propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
27474     /**
27475      * Protobuf type {@code RegionLoadStats}
27476      *
27477      * <pre>
27478      *
27479      * Statistics about the current load on the region
27480      * </pre>
27481      */
    // Mutable companion of RegionLoadStats. Tracks explicit field presence in
    // its own bitField0_ (0x1 = memstoreLoad, 0x2 = heapOccupancy) and copies
    // both values and presence bits into the immutable message in
    // buildPartial().
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so there are no nested builders to
      // force-create; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets both fields to their defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        memstoreLoad_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        heapOccupancy_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
      }

      // build() enforces isInitialized(); trivially true here (no required
      // fields), so it never actually throws for this message.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies values unconditionally but presence bits only when set, so the
      // resulting message's has*() methods mirror this builder's.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.memstoreLoad_ = memstoreLoad_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.heapOccupancy_ = heapOccupancy_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Overlays other's explicitly-set fields onto this builder; fields
      // unset in other are left untouched.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) return this;
        if (other.hasMemstoreLoad()) {
          setMemstoreLoad(other.getMemstoreLoad());
        }
        if (other.hasHeapOccupancy()) {
          setHeapOccupancy(other.getHeapOccupancy());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream, merging whatever was read even when parsing
      // fails partway (the partial message rides on the exception).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1 = memstoreLoad set, 0x2 = heapOccupancy set.
      private int bitField0_;

      // optional int32 memstoreLoad = 1 [default = 0];
      private int memstoreLoad_ ;
      /**
       * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
       *
       * <pre>
       * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
       * </pre>
       */
      public boolean hasMemstoreLoad() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
       *
       * <pre>
       * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
       * </pre>
       */
      public int getMemstoreLoad() {
        return memstoreLoad_;
      }
      /**
       * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
       *
       * Sets the value and marks the field present.
       *
       * <pre>
       * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
       * </pre>
       */
      public Builder setMemstoreLoad(int value) {
        bitField0_ |= 0x00000001;
        memstoreLoad_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
       *
       * Clears the presence bit and restores the default value.
       *
       * <pre>
       * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
       * </pre>
       */
      public Builder clearMemstoreLoad() {
        bitField0_ = (bitField0_ & ~0x00000001);
        memstoreLoad_ = 0;
        onChanged();
        return this;
      }

      // optional int32 heapOccupancy = 2 [default = 0];
      private int heapOccupancy_ ;
      /**
       * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
       *
       * <pre>
       * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
       * We can move this to "ServerLoadStats" should we develop them.
       * </pre>
       */
      public boolean hasHeapOccupancy() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
       *
       * <pre>
       * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
       * We can move this to "ServerLoadStats" should we develop them.
       * </pre>
       */
      public int getHeapOccupancy() {
        return heapOccupancy_;
      }
      /**
       * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
       *
       * Sets the value and marks the field present.
       *
       * <pre>
       * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
       * We can move this to "ServerLoadStats" should we develop them.
       * </pre>
       */
      public Builder setHeapOccupancy(int value) {
        bitField0_ |= 0x00000002;
        heapOccupancy_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
       *
       * Clears the presence bit and restores the default value.
       *
       * <pre>
       * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
       * We can move this to "ServerLoadStats" should we develop them.
       * </pre>
       */
      public Builder clearHeapOccupancy() {
        bitField0_ = (bitField0_ & ~0x00000002);
        heapOccupancy_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:RegionLoadStats)
    }
27710 
    // Eagerly build the shared immutable default instance of RegionLoadStats.
    static {
      defaultInstance = new RegionLoadStats(true);
      defaultInstance.initFields();
    }
27715 
27716     // @@protoc_insertion_point(class_scope:RegionLoadStats)
27717   }
27718 
  /**
   * Read-only accessor interface implemented by both {@code ResultOrException}
   * and its {@code Builder}: a has/get (and, for message fields, getOrBuilder)
   * pair for each declared field.
   */
  public interface ResultOrExceptionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 index = 1;
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi call, save original index of the list of all
     * passed so can align this response w/ original request.
     * </pre>
     */
    boolean hasIndex();
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi call, save original index of the list of all
     * passed so can align this response w/ original request.
     * </pre>
     */
    int getIndex();

    // optional .Result result = 2;
    /**
     * <code>optional .Result result = 2;</code>
     */
    boolean hasResult();
    /**
     * <code>optional .Result result = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
    /**
     * <code>optional .Result result = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();

    // optional .NameBytesPair exception = 3;
    /**
     * <code>optional .NameBytesPair exception = 3;</code>
     */
    boolean hasException();
    /**
     * <code>optional .NameBytesPair exception = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
    /**
     * <code>optional .NameBytesPair exception = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();

    // optional .CoprocessorServiceResult service_result = 4;
    /**
     * <code>optional .CoprocessorServiceResult service_result = 4;</code>
     *
     * <pre>
     * result if this was a coprocessor service call
     * </pre>
     */
    boolean hasServiceResult();
    /**
     * <code>optional .CoprocessorServiceResult service_result = 4;</code>
     *
     * <pre>
     * result if this was a coprocessor service call
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult();
    /**
     * <code>optional .CoprocessorServiceResult service_result = 4;</code>
     *
     * <pre>
     * result if this was a coprocessor service call
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder();

    // optional .RegionLoadStats loadStats = 5;
    /**
     * <code>optional .RegionLoadStats loadStats = 5;</code>
     *
     * <pre>
     * current load on the region
     * </pre>
     */
    boolean hasLoadStats();
    /**
     * <code>optional .RegionLoadStats loadStats = 5;</code>
     *
     * <pre>
     * current load on the region
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats();
    /**
     * <code>optional .RegionLoadStats loadStats = 5;</code>
     *
     * <pre>
     * current load on the region
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder();
  }
27822   /**
27823    * Protobuf type {@code ResultOrException}
27824    *
27825    * <pre>
27826    **
27827    * Either a Result or an Exception NameBytesPair (keyed by
27828    * exception name whose value is the exception stringified)
27829    * or maybe empty if no result and no exception.
27830    * </pre>
27831    */
27832   public static final class ResultOrException extends
27833       com.google.protobuf.GeneratedMessage
27834       implements ResultOrExceptionOrBuilder {
    // Use ResultOrException.newBuilder() to construct.
    private ResultOrException(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the shared default instance; skips normal construction.
    private ResultOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
27841 
    // Shared immutable default instance, assigned in the class's static initializer.
    private static final ResultOrException defaultInstance;
    public static ResultOrException getDefaultInstance() {
      return defaultInstance;
    }

    public ResultOrException getDefaultInstanceForType() {
      return defaultInstance;
    }
27850 
    // Fields seen on the wire that are not in this message's descriptor.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from {@code input}
     * until end of stream (tag 0), setting the matching has-bit for each field
     * read and routing unrecognized tags into {@code unknownFields}. For message
     * fields seen more than once, the later value is merged into the earlier one.
     */
    private ResultOrException(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (index), varint.
              bitField0_ |= 0x00000001;
              index_ = input.readUInt32();
              break;
            }
            case 18: {
              // Field 2 (result), length-delimited message.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = result_.toBuilder();
              }
              result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(result_);
                result_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {
              // Field 3 (exception), length-delimited message.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = exception_.toBuilder();
              }
              exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(exception_);
                exception_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              // Field 4 (service_result), length-delimited message.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = serviceResult_.toBuilder();
              }
              serviceResult_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(serviceResult_);
                serviceResult_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 42: {
              // Field 5 (loadStats), length-delimited message.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = loadStats_.toBuilder();
              }
              loadStats_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(loadStats_);
                loadStats_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error, so the partially
        // built message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the ResultOrException message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor;
    }

    // Reflection table mapping field descriptors to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
    }
27960 
    // Stateless parser shared by all the static parseFrom entry points.
    public static com.google.protobuf.Parser<ResultOrException> PARSER =
        new com.google.protobuf.AbstractParser<ResultOrException>() {
      public ResultOrException parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ResultOrException(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ResultOrException> getParserForType() {
      return PARSER;
    }
27975 
    // Has-bits for fields 1-5; bit 0x1 = index, 0x2 = result, 0x4 = exception,
    // 0x8 = service_result, 0x10 = loadStats.
    private int bitField0_;
    // optional uint32 index = 1;
    public static final int INDEX_FIELD_NUMBER = 1;
    private int index_;
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi call, save original index of the list of all
     * passed so can align this response w/ original request.
     * </pre>
     */
    public boolean hasIndex() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 index = 1;</code>
     *
     * <pre>
     * If part of a multi call, save original index of the list of all
     * passed so can align this response w/ original request.
     * </pre>
     */
    public int getIndex() {
      return index_;
    }
28002 
    // optional .Result result = 2;
    public static final int RESULT_FIELD_NUMBER = 2;
    // Never null: initFields() points this at Result.getDefaultInstance().
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
    /**
     * <code>optional .Result result = 2;</code>
     */
    public boolean hasResult() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .Result result = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
      return result_;
    }
    /**
     * <code>optional .Result result = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
      return result_;
    }
28024 
    // optional .NameBytesPair exception = 3;
    public static final int EXCEPTION_FIELD_NUMBER = 3;
    // Never null: initFields() points this at NameBytesPair.getDefaultInstance().
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
    /**
     * <code>optional .NameBytesPair exception = 3;</code>
     */
    public boolean hasException() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .NameBytesPair exception = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
      return exception_;
    }
    /**
     * <code>optional .NameBytesPair exception = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
      return exception_;
    }
28046 
    // optional .CoprocessorServiceResult service_result = 4;
    public static final int SERVICE_RESULT_FIELD_NUMBER = 4;
    // Never null: initFields() points this at CoprocessorServiceResult.getDefaultInstance().
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_;
    /**
     * <code>optional .CoprocessorServiceResult service_result = 4;</code>
     *
     * <pre>
     * result if this was a coprocessor service call
     * </pre>
     */
    public boolean hasServiceResult() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .CoprocessorServiceResult service_result = 4;</code>
     *
     * <pre>
     * result if this was a coprocessor service call
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
      return serviceResult_;
    }
    /**
     * <code>optional .CoprocessorServiceResult service_result = 4;</code>
     *
     * <pre>
     * result if this was a coprocessor service call
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
      return serviceResult_;
    }
28080 
    // optional .RegionLoadStats loadStats = 5;
    public static final int LOADSTATS_FIELD_NUMBER = 5;
    // Never null: initFields() points this at RegionLoadStats.getDefaultInstance().
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_;
    /**
     * <code>optional .RegionLoadStats loadStats = 5;</code>
     *
     * <pre>
     * current load on the region
     * </pre>
     */
    public boolean hasLoadStats() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional .RegionLoadStats loadStats = 5;</code>
     *
     * <pre>
     * current load on the region
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
      return loadStats_;
    }
    /**
     * <code>optional .RegionLoadStats loadStats = 5;</code>
     *
     * <pre>
     * current load on the region
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
      return loadStats_;
    }
28114 
    // Set every field to its proto default; message fields reference their
    // type's shared default instance so getters never return null.
    private void initFields() {
      index_ = 0;
      result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
      exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
      serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
      loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
    }
28122     private byte memoizedIsInitialized = -1;
28123     public final boolean isInitialized() {
28124       byte isInitialized = memoizedIsInitialized;
28125       if (isInitialized != -1) return isInitialized == 1;
28126 
28127       if (hasException()) {
28128         if (!getException().isInitialized()) {
28129           memoizedIsInitialized = 0;
28130           return false;
28131         }
28132       }
28133       if (hasServiceResult()) {
28134         if (!getServiceResult().isInitialized()) {
28135           memoizedIsInitialized = 0;
28136           return false;
28137         }
28138       }
28139       memoizedIsInitialized = 1;
28140       return true;
28141     }
28142 
    // Serializes the set fields (in field-number order) followed by any
    // unknown fields. getSerializedSize() is called first because nested
    // message serialization relies on memoized sizes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, index_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, result_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, exception_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, serviceResult_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(5, loadStats_);
      }
      getUnknownFields().writeTo(output);
    }
28163 
    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Computes (and caches) the serialized byte size: set fields plus unknown fields.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, index_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, result_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, exception_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, serviceResult_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, loadStats_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
28194 
    private static final long serialVersionUID = 0L;
    // Java serialization delegates to protobuf's serialized-form proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
28201 
28202     @java.lang.Override
28203     public boolean equals(final java.lang.Object obj) {
28204       if (obj == this) {
28205        return true;
28206       }
28207       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)) {
28208         return super.equals(obj);
28209       }
28210       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) obj;
28211 
28212       boolean result = true;
28213       result = result && (hasIndex() == other.hasIndex());
28214       if (hasIndex()) {
28215         result = result && (getIndex()
28216             == other.getIndex());
28217       }
28218       result = result && (hasResult() == other.hasResult());
28219       if (hasResult()) {
28220         result = result && getResult()
28221             .equals(other.getResult());
28222       }
28223       result = result && (hasException() == other.hasException());
28224       if (hasException()) {
28225         result = result && getException()
28226             .equals(other.getException());
28227       }
28228       result = result && (hasServiceResult() == other.hasServiceResult());
28229       if (hasServiceResult()) {
28230         result = result && getServiceResult()
28231             .equals(other.getServiceResult());
28232       }
28233       result = result && (hasLoadStats() == other.hasLoadStats());
28234       if (hasLoadStats()) {
28235         result = result && getLoadStats()
28236             .equals(other.getLoadStats());
28237       }
28238       result = result &&
28239           getUnknownFields().equals(other.getUnknownFields());
28240       return result;
28241     }
28242 
    // Cached hash; 0 means not yet computed (recomputed if the hash happens to be 0).
    private int memoizedHashCode = 0;
    // Hash consistent with equals(): folds in the descriptor, each set field
    // (keyed by field number), and the unknown fields.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIndex()) {
        hash = (37 * hash) + INDEX_FIELD_NUMBER;
        hash = (53 * hash) + getIndex();
      }
      if (hasResult()) {
        hash = (37 * hash) + RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getResult().hashCode();
      }
      if (hasException()) {
        hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
        hash = (53 * hash) + getException().hashCode();
      }
      if (hasServiceResult()) {
        hash = (37 * hash) + SERVICE_RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getServiceResult().hashCode();
      }
      if (hasLoadStats()) {
        hash = (37 * hash) + LOADSTATS_FIELD_NUMBER;
        hash = (53 * hash) + getLoadStats().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
28275 
    // Static parse entry points for the supported input forms
    // (ByteString, byte[], InputStream, CodedInputStream, delimited streams),
    // each with and without an extension registry; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
28328 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
28342     /**
28343      * Protobuf type {@code ResultOrException}
28344      *
28345      * <pre>
28346      **
28347      * Either a Result or an Exception NameBytesPair (keyed by
28348      * exception name whose value is the exception stringified)
28349      * or maybe empty if no result and no exception.
28350      * </pre>
28351      */
28352     public static final class Builder extends
28353         com.google.protobuf.GeneratedMessage.Builder<Builder>
28354        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder {
      // Descriptor for the ResultOrException message type (shared with the message class).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor;
      }

      // Reflection table mapping field descriptors to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
      }
28366 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // When reflection-based field builders are mandated (alwaysUseFieldBuilders),
      // eagerly create the nested builders for each message-typed field.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getResultFieldBuilder();
          getExceptionFieldBuilder();
          getServiceResultFieldBuilder();
          getLoadStatsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
28388 
      // Resets every field to its proto default and clears all five presence
      // bits in bitField0_ (index=0x01, result=0x02, exception=0x04,
      // service_result=0x08, loadStats=0x10). For message fields the nested
      // builder is cleared instead when one has been created.
      public Builder clear() {
        super.clear();
        index_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (resultBuilder_ == null) {
          result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (exceptionBuilder_ == null) {
          exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
        } else {
          exceptionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (serviceResultBuilder_ == null) {
          serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
        } else {
          serviceResultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        if (loadStatsBuilder_ == null) {
          loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
        } else {
          loadStatsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
28419 
      // Deep copy: snapshots the current (possibly partially set) state via
      // buildPartial() and merges it into a brand-new builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
28423 
      // Returns the descriptor of the ResultOrException message type.
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor;
      }
28428 
      // Returns the shared immutable default instance for this message type.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance();
      }
28432 
      // Builds the message, throwing UninitializedMessageException (unchecked)
      // if any required field of a set sub-message is missing; use
      // buildPartial() to skip that check.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
28440 
      // Builds the message without checking required-field initialization.
      // Copies each presence bit from the builder's bitField0_ into the
      // message's bitField0_, and for each message field takes either the
      // cached value or, when a nested builder exists, its built message.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.index_ = index_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (resultBuilder_ == null) {
          result.result_ = result_;
        } else {
          result.result_ = resultBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (exceptionBuilder_ == null) {
          result.exception_ = exception_;
        } else {
          result.exception_ = exceptionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        if (serviceResultBuilder_ == null) {
          result.serviceResult_ = serviceResult_;
        } else {
          result.serviceResult_ = serviceResultBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        if (loadStatsBuilder_ == null) {
          result.loadStats_ = loadStats_;
        } else {
          result.loadStats_ = loadStatsBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
28485 
      // Generic Message merge entry point: dispatches to the typed overload
      // when `other` is a ResultOrException, otherwise falls back to the
      // reflection-based merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
28494 
      // Typed merge: copies only the fields that are set on `other`
      // (message fields are recursively merged, scalars overwritten),
      // then merges unknown fields. Merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()) return this;
        if (other.hasIndex()) {
          setIndex(other.getIndex());
        }
        if (other.hasResult()) {
          mergeResult(other.getResult());
        }
        if (other.hasException()) {
          mergeException(other.getException());
        }
        if (other.hasServiceResult()) {
          mergeServiceResult(other.getServiceResult());
        }
        if (other.hasLoadStats()) {
          mergeLoadStats(other.getLoadStats());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
28515 
      // ResultOrException has no required fields of its own; it is
      // uninitialized only if a set exception or service_result sub-message
      // is itself missing required fields.
      public final boolean isInitialized() {
        if (hasException()) {
          if (!getException().isInitialized()) {
            // set exception sub-message is missing required fields
            return false;
          }
        }
        if (hasServiceResult()) {
          if (!getServiceResult().isInitialized()) {
            // set service_result sub-message is missing required fields
            return false;
          }
        }
        return true;
      }
28531 
      // Parses a ResultOrException from the wire and merges it into this
      // builder. On InvalidProtocolBufferException the partially parsed
      // message (if any) is still merged in the finally block before the
      // exception propagates, so previously read fields are not lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask for the five optional fields of this builder:
      // 0x01=index, 0x02=result, 0x04=exception, 0x08=service_result, 0x10=loadStats.
      private int bitField0_;

      // optional uint32 index = 1;
      private int index_ ;
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi call, save original index of the list of all
       * passed so can align this response w/ original request.
       * </pre>
       */
      public boolean hasIndex() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi call, save original index of the list of all
       * passed so can align this response w/ original request.
       * </pre>
       */
      public int getIndex() {
        return index_;
      }
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi call, save original index of the list of all
       * passed so can align this response w/ original request.
       * </pre>
       */
      public Builder setIndex(int value) {
        bitField0_ |= 0x00000001;
        index_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 index = 1;</code>
       *
       * <pre>
       * If part of a multi call, save original index of the list of all
       * passed so can align this response w/ original request.
       * </pre>
       */
      public Builder clearIndex() {
        bitField0_ = (bitField0_ & ~0x00000001);
        index_ = 0;
        onChanged();
        return this;
      }
28603 
      // optional .Result result = 2;
      // Invariant maintained throughout this group: before a nested builder is
      // created, the field value lives in result_; once getResultFieldBuilder()
      // runs, resultBuilder_ owns the value and result_ is nulled out.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
      /**
       * <code>optional .Result result = 2;</code>
       */
      public boolean hasResult() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
        if (resultBuilder_ == null) {
          return result_;
        } else {
          return resultBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          result_ = value;
          onChanged();
        } else {
          resultBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public Builder setResult(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultBuilder_ == null) {
          result_ = builderForValue.build();
          onChanged();
        } else {
          resultBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          // Merge into the existing value only if the field is already set to
          // a non-default message; otherwise replace it outright.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
            result_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
          } else {
            result_ = value;
          }
          onChanged();
        } else {
          resultBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public Builder clearResult() {
        if (resultBuilder_ == null) {
          result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
          onChanged();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getResultFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
        if (resultBuilder_ != null) {
          return resultBuilder_.getMessageOrBuilder();
        } else {
          return result_;
        }
      }
      /**
       * <code>optional .Result result = 2;</code>
       */
      // Lazily creates the nested builder, transferring ownership of the
      // current result_ value into it.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
          getResultFieldBuilder() {
        if (resultBuilder_ == null) {
          resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
                  result_,
                  getParentForChildren(),
                  isClean());
          result_ = null;
        }
        return resultBuilder_;
      }
28720 
      // optional .NameBytesPair exception = 3;
      // Same value/builder ownership pattern as the `result` field: exception_
      // holds the value until getExceptionFieldBuilder() hands it to exceptionBuilder_.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public boolean hasException() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
        if (exceptionBuilder_ == null) {
          return exception_;
        } else {
          return exceptionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (exceptionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          exception_ = value;
          onChanged();
        } else {
          exceptionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public Builder setException(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (exceptionBuilder_ == null) {
          exception_ = builderForValue.build();
          onChanged();
        } else {
          exceptionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (exceptionBuilder_ == null) {
          // Merge only when already set to a non-default message; else replace.
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
            exception_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
          } else {
            exception_ = value;
          }
          onChanged();
        } else {
          exceptionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public Builder clearException() {
        if (exceptionBuilder_ == null) {
          exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
          onChanged();
        } else {
          exceptionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getExceptionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
        if (exceptionBuilder_ != null) {
          return exceptionBuilder_.getMessageOrBuilder();
        } else {
          return exception_;
        }
      }
      /**
       * <code>optional .NameBytesPair exception = 3;</code>
       */
      // Lazily creates the nested builder and transfers the current value into it.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
          getExceptionFieldBuilder() {
        if (exceptionBuilder_ == null) {
          exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
                  exception_,
                  getParentForChildren(),
                  isClean());
          exception_ = null;
        }
        return exceptionBuilder_;
      }
28837 
      // optional .CoprocessorServiceResult service_result = 4;
      // Same value/builder ownership pattern as the other message fields.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> serviceResultBuilder_;
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public boolean hasServiceResult() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
        if (serviceResultBuilder_ == null) {
          return serviceResult_;
        } else {
          return serviceResultBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public Builder setServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
        if (serviceResultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          serviceResult_ = value;
          onChanged();
        } else {
          serviceResultBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public Builder setServiceResult(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue) {
        if (serviceResultBuilder_ == null) {
          serviceResult_ = builderForValue.build();
          onChanged();
        } else {
          serviceResultBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public Builder mergeServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
        if (serviceResultBuilder_ == null) {
          // Merge only when already set to a non-default message; else replace.
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              serviceResult_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) {
            serviceResult_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder(serviceResult_).mergeFrom(value).buildPartial();
          } else {
            serviceResult_ = value;
          }
          onChanged();
        } else {
          serviceResultBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public Builder clearServiceResult() {
        if (serviceResultBuilder_ == null) {
          serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
          onChanged();
        } else {
          serviceResultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder getServiceResultBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getServiceResultFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
        if (serviceResultBuilder_ != null) {
          return serviceResultBuilder_.getMessageOrBuilder();
        } else {
          return serviceResult_;
        }
      }
      /**
       * <code>optional .CoprocessorServiceResult service_result = 4;</code>
       *
       * <pre>
       * result if this was a coprocessor service call
       * </pre>
       */
      // Lazily creates the nested builder and transfers the current value into it.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> 
          getServiceResultFieldBuilder() {
        if (serviceResultBuilder_ == null) {
          serviceResultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>(
                  serviceResult_,
                  getParentForChildren(),
                  isClean());
          serviceResult_ = null;
        }
        return serviceResultBuilder_;
      }
28990 
      // optional .RegionLoadStats loadStats = 5;
      // Same value/builder ownership pattern as the other message fields.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> loadStatsBuilder_;
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public boolean hasLoadStats() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
        if (loadStatsBuilder_ == null) {
          return loadStats_;
        } else {
          return loadStatsBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public Builder setLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
        if (loadStatsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          loadStats_ = value;
          onChanged();
        } else {
          loadStatsBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public Builder setLoadStats(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
        if (loadStatsBuilder_ == null) {
          loadStats_ = builderForValue.build();
          onChanged();
        } else {
          loadStatsBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public Builder mergeLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
        if (loadStatsBuilder_ == null) {
          // Merge only when already set to a non-default message; else replace.
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              loadStats_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) {
            loadStats_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder(loadStats_).mergeFrom(value).buildPartial();
          } else {
            loadStats_ = value;
          }
          onChanged();
        } else {
          loadStatsBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public Builder clearLoadStats() {
        if (loadStatsBuilder_ == null) {
          loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
          onChanged();
        } else {
          loadStatsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder getLoadStatsBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getLoadStatsFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
        if (loadStatsBuilder_ != null) {
          return loadStatsBuilder_.getMessageOrBuilder();
        } else {
          return loadStats_;
        }
      }
      /**
       * <code>optional .RegionLoadStats loadStats = 5;</code>
       *
       * <pre>
       * current load on the region
       * </pre>
       */
      // Lazily creates the nested builder and transfers the current value into it.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> 
          getLoadStatsFieldBuilder() {
        if (loadStatsBuilder_ == null) {
          loadStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>(
                  loadStats_,
                  getParentForChildren(),
                  isClean());
          loadStats_ = null;
        }
        return loadStatsBuilder_;
      }
29143 
29144       // @@protoc_insertion_point(builder_scope:ResultOrException)
29145     }
29146 
    // Eagerly create the shared singleton returned by getDefaultInstance();
    // the boolean constructor builds a bare instance, so initFields() must
    // run here to populate the field defaults.
    static {
      defaultInstance = new ResultOrException(true);
      defaultInstance.initFields();
    }
29151 
29152     // @@protoc_insertion_point(class_scope:ResultOrException)
29153   }
29154 
  /**
   * Accessor interface for the {@code RegionActionResult} protobuf message;
   * implemented by both the immutable message and its Builder, so callers
   * can read fields without caring which one they hold (generated code).
   */
  public interface RegionActionResultOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .ResultOrException resultOrException = 1;
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> 
        getResultOrExceptionList();
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index);
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    int getResultOrExceptionCount();
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
        getResultOrExceptionOrBuilderList();
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
        int index);

    // optional .NameBytesPair exception = 2;
    /**
     * <code>optional .NameBytesPair exception = 2;</code>
     *
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     */
    boolean hasException();
    /**
     * <code>optional .NameBytesPair exception = 2;</code>
     *
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
    /**
     * <code>optional .NameBytesPair exception = 2;</code>
     *
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
  }
29209   /**
29210    * Protobuf type {@code RegionActionResult}
29211    *
29212    * <pre>
29213    **
29214    * The result of a RegionAction.
29215    * </pre>
29216    */
29217   public static final class RegionActionResult extends
29218       com.google.protobuf.GeneratedMessage
29219       implements RegionActionResultOrBuilder {
29220     // Use RegionActionResult.newBuilder() to construct.
    // Normal construction path, invoked from Builder.buildPartial().
    private RegionActionResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor used only for the shared default instance; field
    // defaults are expected to be populated separately via initFields().
    private RegionActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance (assigned in a static initializer).
    private static final RegionActionResult defaultInstance;
    public static RegionActionResult getDefaultInstance() {
      return defaultInstance;
    }

    public RegionActionResult getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields the parser did not recognize; preserved so reserialization
    // round-trips data written by newer schema versions.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until tag 0
    // (end of stream) or an unparseable unknown field. mutable_bitField0_
    // tracks whether the repeated-field list has been allocated yet.
    private RegionActionResult(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the default label precedes the case labels; switch dispatch
          // is unaffected by label order, so this is legal generated output.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (resultOrException), wire type 2: lazily allocate the
              // list on first element, then append.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>();
                mutable_bitField0_ |= 0x00000001;
              }
              resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry));
              break;
            }
            case 18: {
              // Field 2 (exception), wire type 2: if the field was already
              // seen, merge the new payload into the existing value.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = exception_.toBuilder();
              }
              exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(exception_);
                exception_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated field and unknown-field set even on failure so
        // the partially-parsed message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / reflection plumbing for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
    }

    // Stateless parser singleton; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<RegionActionResult> PARSER =
        new com.google.protobuf.AbstractParser<RegionActionResult>() {
      public RegionActionResult parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RegionActionResult(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RegionActionResult> getParserForType() {
      return PARSER;
    }
29327 
    // Presence bits for optional fields (bit 0 = exception).
    private int bitField0_;
    // repeated .ResultOrException resultOrException = 1;
    public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1;
    // Unmodifiable after construction (sealed by the parsing ctor / builder).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_;
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
      return resultOrException_;
    }
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
        getResultOrExceptionOrBuilderList() {
      return resultOrException_;
    }
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    public int getResultOrExceptionCount() {
      return resultOrException_.size();
    }
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
      return resultOrException_.get(index);
    }
    /**
     * <code>repeated .ResultOrException resultOrException = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
        int index) {
      return resultOrException_.get(index);
    }

    // optional .NameBytesPair exception = 2;
    public static final int EXCEPTION_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
    /**
     * <code>optional .NameBytesPair exception = 2;</code>
     *
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     */
    public boolean hasException() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .NameBytesPair exception = 2;</code>
     *
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
      return exception_;
    }
    /**
     * <code>optional .NameBytesPair exception = 2;</code>
     *
     * <pre>
     * If the operation failed globally for this region, this exception is set
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
      return exception_;
    }

    // Sets every field to its proto default; called by constructors.
    private void initFields() {
      resultOrException_ = java.util.Collections.emptyList();
      exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All repeated elements and the optional exception (when present) must
      // themselves be initialized.
      for (int i = 0; i < getResultOrExceptionCount(); i++) {
        if (!getResultOrException(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasException()) {
        if (!getException().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
29423 
29424     public void writeTo(com.google.protobuf.CodedOutputStream output)
29425                         throws java.io.IOException {
29426       getSerializedSize();
29427       for (int i = 0; i < resultOrException_.size(); i++) {
29428         output.writeMessage(1, resultOrException_.get(i));
29429       }
29430       if (((bitField0_ & 0x00000001) == 0x00000001)) {
29431         output.writeMessage(2, exception_);
29432       }
29433       getUnknownFields().writeTo(output);
29434     }
29435 
29436     private int memoizedSerializedSize = -1;
29437     public int getSerializedSize() {
29438       int size = memoizedSerializedSize;
29439       if (size != -1) return size;
29440 
29441       size = 0;
29442       for (int i = 0; i < resultOrException_.size(); i++) {
29443         size += com.google.protobuf.CodedOutputStream
29444           .computeMessageSize(1, resultOrException_.get(i));
29445       }
29446       if (((bitField0_ & 0x00000001) == 0x00000001)) {
29447         size += com.google.protobuf.CodedOutputStream
29448           .computeMessageSize(2, exception_);
29449       }
29450       size += getUnknownFields().getSerializedSize();
29451       memoizedSerializedSize = size;
29452       return size;
29453     }
29454 
    private static final long serialVersionUID = 0L;
    // Java serialization delegates to the superclass proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over all fields, presence bits, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) obj;

      boolean result = true;
      result = result && getResultOrExceptionList()
          .equals(other.getResultOrExceptionList());
      result = result && (hasException() == other.hasException());
      if (hasException()) {
        result = result && getException()
            .equals(other.getException());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 doubles as the "not yet computed" sentinel for the hash cache.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard protoc hash recipe: mix field numbers and field values;
      // fields that are absent/empty do not contribute.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getResultOrExceptionCount() > 0) {
        hash = (37 * hash) + RESULTOREXCEPTION_FIELD_NUMBER;
        hash = (53 * hash) + getResultOrExceptionList().hashCode();
      }
      if (hasException()) {
        hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
        hash = (53 * hash) + getException().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
29505 
    // Static parsing entry points; all overloads delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
29572     /**
29573      * Protobuf type {@code RegionActionResult}
29574      *
29575      * <pre>
29576      **
29577      * The result of a RegionAction.
29578      * </pre>
29579      */
29580     public static final class Builder extends
29581         com.google.protobuf.GeneratedMessage.Builder<Builder>
29582        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder {
      // Descriptor / reflection plumbing, mirroring the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates nested field builders when the runtime is configured to
      // always use field builders (alwaysUseFieldBuilders is a protobuf
      // runtime flag); otherwise they are created lazily on first use.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getResultOrExceptionFieldBuilder();
          getExceptionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
29614 
      // Resets every field to its default. Builder bit 0x01 tracks the
      // repeated list, bit 0x02 tracks the optional exception.
      public Builder clear() {
        super.clear();
        if (resultOrExceptionBuilder_ == null) {
          resultOrException_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          resultOrExceptionBuilder_.clear();
        }
        if (exceptionBuilder_ == null) {
          exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
        } else {
          exceptionBuilder_.clear();
        }
        // Clearing the exception presence bit happens unconditionally,
        // regardless of which branch above ran (standard protoc output).
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields
      // (checked transitively through isInitialized()).
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
29652 
      // Assembles a message from the builder state without initialization
      // checks. Builder bit 0x02 (exception) is remapped to message bit 0x01;
      // the repeated field has no presence bit on the message side.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (resultOrExceptionBuilder_ == null) {
          // Hand the list over to the message: seal it and drop the builder's
          // ownership bit so later mutations copy-on-write a fresh list.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.resultOrException_ = resultOrException_;
        } else {
          result.resultOrException_ = resultOrExceptionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        if (exceptionBuilder_ == null) {
          result.exception_ = exception_;
        } else {
          result.exception_ = exceptionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
29678 
      // Type-dispatching merge: use the fast typed path when possible,
      // otherwise fall back to reflective field-by-field merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: appends other's repeated elements and merges the
      // optional exception. Merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()) return this;
        if (resultOrExceptionBuilder_ == null) {
          if (!other.resultOrException_.isEmpty()) {
            if (resultOrException_.isEmpty()) {
              // Adopt other's (immutable) list directly; ownership bit is
              // cleared so a later mutation copies it first.
              resultOrException_ = other.resultOrException_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureResultOrExceptionIsMutable();
              resultOrException_.addAll(other.resultOrException_);
            }
            onChanged();
          }
        } else {
          if (!other.resultOrException_.isEmpty()) {
            if (resultOrExceptionBuilder_.isEmpty()) {
              // Builder holds nothing: discard it, adopt other's list, and
              // recreate the field builder only if the runtime forces it.
              resultOrExceptionBuilder_.dispose();
              resultOrExceptionBuilder_ = null;
              resultOrException_ = other.resultOrException_;
              bitField0_ = (bitField0_ & ~0x00000001);
              resultOrExceptionBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getResultOrExceptionFieldBuilder() : null;
            } else {
              resultOrExceptionBuilder_.addAllMessages(other.resultOrException_);
            }
          }
        }
        if (other.hasException()) {
          mergeException(other.getException());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
29722 
      // Unlike the message-side check, the builder does not memoize the
      // result because its fields can still change.
      public final boolean isInitialized() {
        for (int i = 0; i < getResultOrExceptionCount(); i++) {
          if (!getResultOrException(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasException()) {
          if (!getException().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      // Parses a message from the stream and merges it into this builder.
      // On parse failure, whatever was successfully read (the exception's
      // unfinished message) is still merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder presence/ownership bits (bit 0 = repeated list is mutable
      // and owned by this builder, bit 1 = exception is set).
      private int bitField0_;

      // repeated .ResultOrException resultOrException = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replace a shared/immutable list with a private
      // ArrayList before the first mutation.
      private void ensureResultOrExceptionIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>(resultOrException_);
          bitField0_ |= 0x00000001;
         }
      }

      // Non-null once nested builders are in use; then it, not the list
      // above, holds the field's state.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_;

      /**
       * <code>repeated .ResultOrException resultOrException = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
        if (resultOrExceptionBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resultOrException_);
        } else {
          return resultOrExceptionBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .ResultOrException resultOrException = 1;</code>
       */
      public int getResultOrExceptionCount() {
        if (resultOrExceptionBuilder_ == null) {
          return resultOrException_.size();
        } else {
          return resultOrExceptionBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .ResultOrException resultOrException = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
        if (resultOrExceptionBuilder_ == null) {
          return resultOrException_.get(index);
        } else {
          return resultOrExceptionBuilder_.getMessage(index);
        }
      }
29801       /**
29802        * <code>repeated .ResultOrException resultOrException = 1;</code>
29803        */
29804       public Builder setResultOrException(
29805           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
29806         if (resultOrExceptionBuilder_ == null) {
29807           if (value == null) {
29808             throw new NullPointerException();
29809           }
29810           ensureResultOrExceptionIsMutable();
29811           resultOrException_.set(index, value);
29812           onChanged();
29813         } else {
29814           resultOrExceptionBuilder_.setMessage(index, value);
29815         }
29816         return this;
29817       }
29818       /**
29819        * <code>repeated .ResultOrException resultOrException = 1;</code>
29820        */
29821       public Builder setResultOrException(
29822           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
29823         if (resultOrExceptionBuilder_ == null) {
29824           ensureResultOrExceptionIsMutable();
29825           resultOrException_.set(index, builderForValue.build());
29826           onChanged();
29827         } else {
29828           resultOrExceptionBuilder_.setMessage(index, builderForValue.build());
29829         }
29830         return this;
29831       }
29832       /**
29833        * <code>repeated .ResultOrException resultOrException = 1;</code>
29834        */
29835       public Builder addResultOrException(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
29836         if (resultOrExceptionBuilder_ == null) {
29837           if (value == null) {
29838             throw new NullPointerException();
29839           }
29840           ensureResultOrExceptionIsMutable();
29841           resultOrException_.add(value);
29842           onChanged();
29843         } else {
29844           resultOrExceptionBuilder_.addMessage(value);
29845         }
29846         return this;
29847       }
29848       /**
29849        * <code>repeated .ResultOrException resultOrException = 1;</code>
29850        */
29851       public Builder addResultOrException(
29852           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
29853         if (resultOrExceptionBuilder_ == null) {
29854           if (value == null) {
29855             throw new NullPointerException();
29856           }
29857           ensureResultOrExceptionIsMutable();
29858           resultOrException_.add(index, value);
29859           onChanged();
29860         } else {
29861           resultOrExceptionBuilder_.addMessage(index, value);
29862         }
29863         return this;
29864       }
29865       /**
29866        * <code>repeated .ResultOrException resultOrException = 1;</code>
29867        */
29868       public Builder addResultOrException(
29869           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
29870         if (resultOrExceptionBuilder_ == null) {
29871           ensureResultOrExceptionIsMutable();
29872           resultOrException_.add(builderForValue.build());
29873           onChanged();
29874         } else {
29875           resultOrExceptionBuilder_.addMessage(builderForValue.build());
29876         }
29877         return this;
29878       }
29879       /**
29880        * <code>repeated .ResultOrException resultOrException = 1;</code>
29881        */
29882       public Builder addResultOrException(
29883           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
29884         if (resultOrExceptionBuilder_ == null) {
29885           ensureResultOrExceptionIsMutable();
29886           resultOrException_.add(index, builderForValue.build());
29887           onChanged();
29888         } else {
29889           resultOrExceptionBuilder_.addMessage(index, builderForValue.build());
29890         }
29891         return this;
29892       }
29893       /**
29894        * <code>repeated .ResultOrException resultOrException = 1;</code>
29895        */
29896       public Builder addAllResultOrException(
29897           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> values) {
29898         if (resultOrExceptionBuilder_ == null) {
29899           ensureResultOrExceptionIsMutable();
29900           super.addAll(values, resultOrException_);
29901           onChanged();
29902         } else {
29903           resultOrExceptionBuilder_.addAllMessages(values);
29904         }
29905         return this;
29906       }
29907       /**
29908        * <code>repeated .ResultOrException resultOrException = 1;</code>
29909        */
29910       public Builder clearResultOrException() {
29911         if (resultOrExceptionBuilder_ == null) {
29912           resultOrException_ = java.util.Collections.emptyList();
29913           bitField0_ = (bitField0_ & ~0x00000001);
29914           onChanged();
29915         } else {
29916           resultOrExceptionBuilder_.clear();
29917         }
29918         return this;
29919       }
29920       /**
29921        * <code>repeated .ResultOrException resultOrException = 1;</code>
29922        */
29923       public Builder removeResultOrException(int index) {
29924         if (resultOrExceptionBuilder_ == null) {
29925           ensureResultOrExceptionIsMutable();
29926           resultOrException_.remove(index);
29927           onChanged();
29928         } else {
29929           resultOrExceptionBuilder_.remove(index);
29930         }
29931         return this;
29932       }
29933       /**
29934        * <code>repeated .ResultOrException resultOrException = 1;</code>
29935        */
29936       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder getResultOrExceptionBuilder(
29937           int index) {
29938         return getResultOrExceptionFieldBuilder().getBuilder(index);
29939       }
29940       /**
29941        * <code>repeated .ResultOrException resultOrException = 1;</code>
29942        */
29943       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
29944           int index) {
29945         if (resultOrExceptionBuilder_ == null) {
29946           return resultOrException_.get(index);  } else {
29947           return resultOrExceptionBuilder_.getMessageOrBuilder(index);
29948         }
29949       }
29950       /**
29951        * <code>repeated .ResultOrException resultOrException = 1;</code>
29952        */
29953       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
29954            getResultOrExceptionOrBuilderList() {
29955         if (resultOrExceptionBuilder_ != null) {
29956           return resultOrExceptionBuilder_.getMessageOrBuilderList();
29957         } else {
29958           return java.util.Collections.unmodifiableList(resultOrException_);
29959         }
29960       }
29961       /**
29962        * <code>repeated .ResultOrException resultOrException = 1;</code>
29963        */
29964       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder() {
29965         return getResultOrExceptionFieldBuilder().addBuilder(
29966             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
29967       }
29968       /**
29969        * <code>repeated .ResultOrException resultOrException = 1;</code>
29970        */
29971       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder(
29972           int index) {
29973         return getResultOrExceptionFieldBuilder().addBuilder(
29974             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
29975       }
29976       /**
29977        * <code>repeated .ResultOrException resultOrException = 1;</code>
29978        */
29979       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder> 
29980            getResultOrExceptionBuilderList() {
29981         return getResultOrExceptionFieldBuilder().getBuilderList();
29982       }
29983       private com.google.protobuf.RepeatedFieldBuilder<
29984           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> 
29985           getResultOrExceptionFieldBuilder() {
29986         if (resultOrExceptionBuilder_ == null) {
29987           resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
29988               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>(
29989                   resultOrException_,
29990                   ((bitField0_ & 0x00000001) == 0x00000001),
29991                   getParentForChildren(),
29992                   isClean());
29993           resultOrException_ = null;
29994         }
29995         return resultOrExceptionBuilder_;
29996       }
29997 
      // optional .NameBytesPair exception = 2;
      // Presence is tracked by bit 0x00000002 of bitField0_.  As with the
      // repeated field above, storage switches from the plain exception_ field
      // to exceptionBuilder_ once getExceptionFieldBuilder() is first called.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public boolean hasException() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
        if (exceptionBuilder_ == null) {
          return exception_;
        } else {
          return exceptionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (exceptionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          exception_ = value;
          onChanged();
        } else {
          exceptionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public Builder setException(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
        if (exceptionBuilder_ == null) {
          exception_ = builderForValue.build();
          onChanged();
        } else {
          exceptionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
        if (exceptionBuilder_ == null) {
          // Merge into the existing value only when the field is already set
          // and not still the shared default instance; otherwise replace it.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
            exception_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
          } else {
            exception_ = value;
          }
          onChanged();
        } else {
          exceptionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public Builder clearException() {
        if (exceptionBuilder_ == null) {
          exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
          onChanged();
        } else {
          exceptionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);  // drop the presence bit
        return this;
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
        // Handing out a mutable builder marks the field present.
        bitField0_ |= 0x00000002;
        onChanged();
        return getExceptionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
        if (exceptionBuilder_ != null) {
          return exceptionBuilder_.getMessageOrBuilder();
        } else {
          return exception_;
        }
      }
      /**
       * <code>optional .NameBytesPair exception = 2;</code>
       *
       * <pre>
       * If the operation failed globally for this region, this exception is set
       * </pre>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
          getExceptionFieldBuilder() {
        // Lazily constructs the SingleFieldBuilder; afterwards it owns the
        // message, so the plain field reference is dropped.
        if (exceptionBuilder_ == null) {
          exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
                  exception_,
                  getParentForChildren(),
                  isClean());
          exception_ = null;
        }
        return exceptionBuilder_;
      }
30150 
30151       // @@protoc_insertion_point(builder_scope:RegionActionResult)
30152     }
30153 
    // Eagerly build the shared default instance when the class loads.
    static {
      defaultInstance = new RegionActionResult(true);
      defaultInstance.initFields();
    }
30158 
30159     // @@protoc_insertion_point(class_scope:RegionActionResult)
30160   }
30161 
  // Read-only accessor view of MultiRequest's three fields; implemented by
  // the MultiRequest message below (and, per the generated pattern, by its
  // Builder).
  public interface MultiRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .RegionAction regionAction = 1;
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> 
        getRegionActionList();
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index);
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    int getRegionActionCount();
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
        getRegionActionOrBuilderList();
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
        int index);

    // optional uint64 nonceGroup = 2;
    /**
     * <code>optional uint64 nonceGroup = 2;</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonceGroup = 2;</code>
     */
    long getNonceGroup();

    // optional .Condition condition = 3;
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    boolean hasCondition();
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
  }
30214   /**
30215    * Protobuf type {@code MultiRequest}
30216    *
30217    * <pre>
30218    **
30219    * Execute a list of actions on a given region in order.
30220    * Nothing prevents a request to contains a set of RegionAction on the same region.
30221    * For this reason, the matching between the MultiRequest and the MultiResponse is not
30222    *  done by the region specifier but by keeping the order of the RegionActionResult vs.
30223    *  the order of the RegionAction.
30224    * </pre>
30225    */
30226   public static final class MultiRequest extends
30227       com.google.protobuf.GeneratedMessage
30228       implements MultiRequestOrBuilder {
    // Use MultiRequest.newBuilder() to construct.
    private MultiRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor used only to create the shared default instance.
    private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Single shared immutable default instance (assigned in the static
    // initializer further down).
    private static final MultiRequest defaultInstance;
    public static MultiRequest getDefaultInstance() {
      return defaultInstance;
    }

    public MultiRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not in this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Stream-parsing constructor used by PARSER: reads tag/value pairs until
    // end of input (tag 0), collecting unrecognized fields into unknownFields.
    private MultiRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // NOTE: the default arm precedes the field cases; Java switch
            // dispatch is by label, so this ordering has no effect.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // tag (1 << 3 | 2): repeated message regionAction
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>();
                mutable_bitField0_ |= 0x00000001;
              }
              regionAction_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry));
              break;
            }
            case 16: {  // tag (2 << 3 | 0): uint64 nonceGroup
              bitField0_ |= 0x00000001;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 26: {  // tag (3 << 3 | 2): message condition
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = condition_.toBuilder();
              }
              condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
              if (subBuilder != null) {
                // Field 3 appeared more than once on the wire: merge the new
                // value into the previously parsed one.
                subBuilder.mergeFrom(condition_);
                condition_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal state even when parsing fails part-way, so the partially
        // parsed message attached to the exception is safe to inspect.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
    }

    // Shared parser; delegates to the stream-parsing constructor above.
    public static com.google.protobuf.Parser<MultiRequest> PARSER =
        new com.google.protobuf.AbstractParser<MultiRequest>() {
      public MultiRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MultiRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MultiRequest> getParserForType() {
      return PARSER;
    }
30341 
    // Presence bits: 0x00000001 = nonceGroup, 0x00000002 = condition.
    private int bitField0_;
    // repeated .RegionAction regionAction = 1;
    public static final int REGIONACTION_FIELD_NUMBER = 1;
    // Immutable after construction (sealed in the parsing constructor).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_;
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
      return regionAction_;
    }
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
        getRegionActionOrBuilderList() {
      return regionAction_;
    }
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    public int getRegionActionCount() {
      return regionAction_.size();
    }
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
      return regionAction_.get(index);
    }
    /**
     * <code>repeated .RegionAction regionAction = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
        int index) {
      return regionAction_.get(index);
    }

    // optional uint64 nonceGroup = 2;
    public static final int NONCEGROUP_FIELD_NUMBER = 2;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonceGroup = 2;</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 nonceGroup = 2;</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional .Condition condition = 3;
    public static final int CONDITION_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    public boolean hasCondition() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
      return condition_;
    }
    /**
     * <code>optional .Condition condition = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
      return condition_;
    }
30416 
    // Assign every field its proto default; called before parsing begins.
    private void initFields() {
      regionAction_ = java.util.Collections.emptyList();
      nonceGroup_ = 0L;
      condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
    }
    // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // All nested messages must themselves be initialized (required fields
      // present) for this message to count as initialized.
      for (int i = 0; i < getRegionActionCount(); i++) {
        if (!getRegionAction(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasCondition()) {
        if (!getCondition().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
30442 
    // Serializes all set fields, then any unknown fields, in field-number order.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // invoked for its side effect: primes the memoized size
      for (int i = 0; i < regionAction_.size(); i++) {
        output.writeMessage(1, regionAction_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(3, condition_);
      }
      getUnknownFields().writeTo(output);
    }

    // -1 means "not computed yet"; the message is immutable so the size is
    // computed once and cached.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < regionAction_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, regionAction_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, condition_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
30480 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is routed through GeneratedMessage's replacement object.
      return super.writeReplace();
    }
30487 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj;

      // Field-by-field comparison: presence bits must match, and the values
      // are only compared when the field is set on both sides.
      boolean result = true;
      result = result && getRegionActionList()
          .equals(other.getRegionActionList());
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasCondition() == other.hasCondition());
      if (hasCondition()) {
        result = result && getCondition()
            .equals(other.getCondition());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 means "not computed yet"; safe because the hash formula below never
    // yields 0 for a computed value in practice.  NOTE(review): generated
    // pattern — a genuinely zero hash would just be recomputed each call.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getRegionActionCount() > 0) {
        hash = (37 * hash) + REGIONACTION_FIELD_NUMBER;
        hash = (53 * hash) + getRegionActionList().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasCondition()) {
        hash = (37 * hash) + CONDITION_FIELD_NUMBER;
        hash = (53 * hash) + getCondition().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
30540 
    // ------------------------------------------------------------------
    // Static parsing entry points. All delegate to the message's PARSER
    // singleton; the ExtensionRegistryLite overloads let callers resolve
    // extensions while parsing, and the "Delimited" variants read a varint
    // length prefix before the message bytes (for streams of messages).
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
30593 
    // Builder factories: newBuilder() starts from all-defaults,
    // newBuilder(prototype) starts pre-populated from an existing message,
    // and toBuilder() is shorthand for the latter applied to this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
30600 
    // Invoked by the protobuf runtime when this message is nested inside
    // another builder: returns a Builder wired to notify the given parent of
    // changes (used by the field-builder machinery).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code MultiRequest}
     *
     * <pre>
     **
     * Execute a list of actions on a given region in order.
     * Nothing prevents a request to contains a set of RegionAction on the same region.
     * For this reason, the matching between the MultiRequest and the MultiResponse is not
     *  done by the region specifier but by keeping the order of the RegionActionResult vs.
     *  the order of the RegionAction.
     * </pre>
     *
     * Mutable builder for MultiRequest. Field presence is tracked in
     * bitField0_: bit 0x1 = regionAction list is a private mutable copy,
     * bit 0x2 = nonceGroup set, bit 0x4 = condition set.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested field builders when the runtime is
      // configured to always use field builders (reflection-based access).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionActionFieldBuilder();
          getConditionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        if (regionActionBuilder_ == null) {
          regionAction_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          regionActionBuilder_.clear();
        }
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (conditionBuilder_ == null) {
          condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
        } else {
          conditionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields
      // (checked transitively through isInitialized()).
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Snapshots the builder state into an immutable message. Note the
      // builder's presence bits (0x2 nonceGroup, 0x4 condition) are remapped
      // to the message's bits (0x1, 0x2) because the message does not need a
      // bit for the repeated regionAction field.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (regionActionBuilder_ == null) {
          // Hand the list to the message unmodifiable; the builder drops its
          // ownership bit so a later mutation triggers a defensive copy.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.regionAction_ = regionAction_;
        } else {
          result.regionAction_ = regionActionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000002;
        }
        if (conditionBuilder_ == null) {
          result.condition_ = condition_;
        } else {
          result.condition_ = conditionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges another MultiRequest into this builder: regionActions are
      // appended (sharing the other message's immutable list when this one is
      // empty, as an optimization), scalar/message fields are overwritten or
      // sub-merged only when present in 'other'.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this;
        if (regionActionBuilder_ == null) {
          if (!other.regionAction_.isEmpty()) {
            if (regionAction_.isEmpty()) {
              regionAction_ = other.regionAction_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureRegionActionIsMutable();
              regionAction_.addAll(other.regionAction_);
            }
            onChanged();
          }
        } else {
          if (!other.regionAction_.isEmpty()) {
            if (regionActionBuilder_.isEmpty()) {
              // Discard the empty field builder and adopt the other list
              // directly; recreate the builder lazily if the runtime
              // requires field builders.
              regionActionBuilder_.dispose();
              regionActionBuilder_ = null;
              regionAction_ = other.regionAction_;
              bitField0_ = (bitField0_ & ~0x00000001);
              regionActionBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getRegionActionFieldBuilder() : null;
            } else {
              regionActionBuilder_.addAllMessages(other.regionAction_);
            }
          }
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasCondition()) {
          mergeCondition(other.getCondition());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // True when every contained RegionAction and the optional Condition
      // (if set) have all their required fields.
      public final boolean isInitialized() {
        for (int i = 0; i < getRegionActionCount(); i++) {
          if (!getRegionAction(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasCondition()) {
          if (!getCondition().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      // Wire-format merge: parses a MultiRequest from the stream and merges
      // it in. On a parse error, any partially parsed message is still merged
      // (in the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .RegionAction regionAction = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: bit 0x1 means regionAction_ is already a private
      // mutable ArrayList; otherwise it may be shared/immutable and must be
      // copied before mutation.
      private void ensureRegionActionIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>(regionAction_);
          bitField0_ |= 0x00000001;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> regionActionBuilder_;

      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
        if (regionActionBuilder_ == null) {
          return java.util.Collections.unmodifiableList(regionAction_);
        } else {
          return regionActionBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public int getRegionActionCount() {
        if (regionActionBuilder_ == null) {
          return regionAction_.size();
        } else {
          return regionActionBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
        if (regionActionBuilder_ == null) {
          return regionAction_.get(index);
        } else {
          return regionActionBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder setRegionAction(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
        if (regionActionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionActionIsMutable();
          regionAction_.set(index, value);
          onChanged();
        } else {
          regionActionBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder setRegionAction(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
        if (regionActionBuilder_ == null) {
          ensureRegionActionIsMutable();
          regionAction_.set(index, builderForValue.build());
          onChanged();
        } else {
          regionActionBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder addRegionAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
        if (regionActionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionActionIsMutable();
          regionAction_.add(value);
          onChanged();
        } else {
          regionActionBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder addRegionAction(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
        if (regionActionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionActionIsMutable();
          regionAction_.add(index, value);
          onChanged();
        } else {
          regionActionBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder addRegionAction(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
        if (regionActionBuilder_ == null) {
          ensureRegionActionIsMutable();
          regionAction_.add(builderForValue.build());
          onChanged();
        } else {
          regionActionBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder addRegionAction(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
        if (regionActionBuilder_ == null) {
          ensureRegionActionIsMutable();
          regionAction_.add(index, builderForValue.build());
          onChanged();
        } else {
          regionActionBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder addAllRegionAction(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> values) {
        if (regionActionBuilder_ == null) {
          ensureRegionActionIsMutable();
          super.addAll(values, regionAction_);
          onChanged();
        } else {
          regionActionBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder clearRegionAction() {
        if (regionActionBuilder_ == null) {
          regionAction_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          regionActionBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public Builder removeRegionAction(int index) {
        if (regionActionBuilder_ == null) {
          ensureRegionActionIsMutable();
          regionAction_.remove(index);
          onChanged();
        } else {
          regionActionBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder getRegionActionBuilder(
          int index) {
        return getRegionActionFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
          int index) {
        if (regionActionBuilder_ == null) {
          return regionAction_.get(index);  } else {
          return regionActionBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
           getRegionActionOrBuilderList() {
        if (regionActionBuilder_ != null) {
          return regionActionBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(regionAction_);
        }
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder() {
        return getRegionActionFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder(
          int index) {
        return getRegionActionFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
      }
      /**
       * <code>repeated .RegionAction regionAction = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder> 
           getRegionActionBuilderList() {
        return getRegionActionFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder; once created it owns the
      // list, so regionAction_ is nulled out and all access goes through it.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> 
          getRegionActionFieldBuilder() {
        if (regionActionBuilder_ == null) {
          regionActionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>(
                  regionAction_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          regionAction_ = null;
        }
        return regionActionBuilder_;
      }

      // optional uint64 nonceGroup = 2;
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonceGroup = 2;</code>
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 nonceGroup = 2;</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonceGroup = 2;</code>
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000002;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonceGroup = 2;</code>
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }

      // optional .Condition condition = 3;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
      /**
       * <code>optional .Condition condition = 3;</code>
       */
      public boolean hasCondition() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
        if (conditionBuilder_ == null) {
          return condition_;
        } else {
          return conditionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       */
      public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
        if (conditionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          condition_ = value;
          onChanged();
        } else {
          conditionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       */
      public Builder setCondition(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
        if (conditionBuilder_ == null) {
          condition_ = builderForValue.build();
          onChanged();
        } else {
          conditionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       *
       * Merges into an existing condition when one is already set (and is not
       * the shared default instance); otherwise replaces it outright.
       */
      public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
        if (conditionBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
            condition_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
          } else {
            condition_ = value;
          }
          onChanged();
        } else {
          conditionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       */
      public Builder clearCondition() {
        if (conditionBuilder_ == null) {
          condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
          onChanged();
        } else {
          conditionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       *
       * Marks the field present, since handing out the nested builder lets
       * the caller mutate it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getConditionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
        if (conditionBuilder_ != null) {
          return conditionBuilder_.getMessageOrBuilder();
        } else {
          return condition_;
        }
      }
      /**
       * <code>optional .Condition condition = 3;</code>
       *
       * Lazily creates the SingleFieldBuilder; once created it owns the
       * value, so condition_ is nulled out and all access goes through it.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> 
          getConditionFieldBuilder() {
        if (conditionBuilder_ == null) {
          conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
                  condition_,
                  getParentForChildren(),
                  isClean());
          condition_ = null;
        }
        return conditionBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:MultiRequest)
    }
31198 
    // Eagerly builds the type's shared default (all-fields-unset) instance at
    // class-load time; initFields() populates the default field values.
    static {
      defaultInstance = new MultiRequest(true);
      defaultInstance.initFields();
    }
31203 
31204     // @@protoc_insertion_point(class_scope:MultiRequest)
31205   }
31206 
  /**
   * Read-only accessor interface for {@code MultiResponse}, implemented by
   * both the immutable message class and its Builder.
   */
  public interface MultiResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .RegionActionResult regionActionResult = 1;
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> 
        getRegionActionResultList();
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index);
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    int getRegionActionResultCount();
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
        getRegionActionResultOrBuilderList();
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
        int index);

    // optional bool processed = 2;
    /**
     * <code>optional bool processed = 2;</code>
     *
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     */
    boolean hasProcessed();
    /**
     * <code>optional bool processed = 2;</code>
     *
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     */
    boolean getProcessed();
  }
31253   /**
31254    * Protobuf type {@code MultiResponse}
31255    */
31256   public static final class MultiResponse extends
31257       com.google.protobuf.GeneratedMessage
31258       implements MultiResponseOrBuilder {
    // Use MultiResponse.newBuilder() to construct.
    private MultiResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      // Carry over unknown fields accumulated while the builder was populated.
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal ctor for the shared default instance; field defaults are applied
    // separately (presumably via initFields() in the class static initializer,
    // matching the MultiRequest pattern above — confirm in generated tail).
    private MultiResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
31265 
    // Shared immutable instance with every field at its proto default.
    private static final MultiResponse defaultInstance;
    public static MultiResponse getDefaultInstance() {
      return defaultInstance;
    }

    public MultiResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
31274 
    // Fields seen on the wire that this schema version does not recognize;
    // preserved so re-serialization does not drop them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor; invoked only through PARSER.
    private MultiResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Bit 0 records that regionActionResult_ was swapped to a mutable
      // ArrayList during parsing and must be sealed in the finally block.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // tag 0 signals end of input
              done = true;
              break;
            default: { // unrecognized tag: preserve it as an unknown field
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: { // field 1 (regionActionResult), length-delimited
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>();
                mutable_bitField0_ |= 0x00000001;
              }
              regionActionResult_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry));
              break;
            }
            case 16: { // field 2 (processed), varint bool
              bitField0_ |= 0x00000001;
              processed_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated list and unknown fields even on parse failure, so
        // the partially-built message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field accessor table for this type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
    }
31343 
    // Parser singleton; delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<MultiResponse> PARSER =
        new com.google.protobuf.AbstractParser<MultiResponse>() {
      public MultiResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MultiResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MultiResponse> getParserForType() {
      return PARSER;
    }
31358 
    // Presence bits for optional fields: bit 0 = processed.
    private int bitField0_;
    // repeated .RegionActionResult regionActionResult = 1;
    public static final int REGIONACTIONRESULT_FIELD_NUMBER = 1;
    // Immutable after construction (sealed by the parse ctor / buildPartial).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_;
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
      return regionActionResult_;
    }
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
        getRegionActionResultOrBuilderList() {
      return regionActionResult_;
    }
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    public int getRegionActionResultCount() {
      return regionActionResult_.size();
    }
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
      return regionActionResult_.get(index);
    }
    /**
     * <code>repeated .RegionActionResult regionActionResult = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
        int index) {
      return regionActionResult_.get(index);
    }

    // optional bool processed = 2;
    public static final int PROCESSED_FIELD_NUMBER = 2;
    private boolean processed_;
    /**
     * <code>optional bool processed = 2;</code>
     *
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     */
    public boolean hasProcessed() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool processed = 2;</code>
     *
     * <pre>
     * used for mutate to indicate processed only
     * </pre>
     */
    public boolean getProcessed() {
      return processed_;
    }
31419 
    // Resets both fields to their proto default values.
    private void initFields() {
      regionActionResult_ = java.util.Collections.emptyList();
      processed_ = false;
    }
    // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Only the nested RegionActionResult messages can be uninitialized.
      for (int i = 0; i < getRegionActionResultCount(); i++) {
        if (!getRegionActionResult(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
31438 
    // Serializes in field-number order; getSerializedSize() is called first to
    // populate cached sizes used by nested message serialization.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < regionActionResult_.size(); i++) {
        output.writeMessage(1, regionActionResult_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(2, processed_);
      }
      getUnknownFields().writeTo(output);
    }
31450 
    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < regionActionResult_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, regionActionResult_.get(i));
      }
      // Optional bool is only counted when present.
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, processed_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
31469 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
31476 
    // Value equality: repeated field, presence + value of the optional bool,
    // and the unknown-field set must all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj;

      boolean result = true;
      result = result && getRegionActionResultList()
          .equals(other.getRegionActionResultList());
      result = result && (hasProcessed() == other.hasProcessed());
      if (hasProcessed()) {
        result = result && (getProcessed()
            == other.getProcessed());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
31499 
    // Cached hash; 0 means "not yet computed". Consistent with equals():
    // fields are mixed in only when present/non-empty.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getRegionActionResultCount() > 0) {
        hash = (37 * hash) + REGIONACTIONRESULT_FIELD_NUMBER;
        hash = (53 * hash) + getRegionActionResultList().hashCode();
      }
      if (hasProcessed()) {
        hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getProcessed());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
31520 
    // Static parse entry points; all variants delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
31573 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
31587     /**
31588      * Protobuf type {@code MultiResponse}
31589      */
31590     public static final class Builder extends
31591         com.google.protobuf.GeneratedMessage.Builder<Builder>
31592        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder {
      // Reflection support; mirrors the accessors on the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
      }
31604 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Nested-builder variant; parent is notified of changes via onChanged().
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Eagerly create the repeated-field builder only when the runtime is
        // configured to always use field builders (nested-builder mode).
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionActionResultFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
31623 
      // Resets all fields. Builder bit 0 = regionActionResult list is mutable;
      // builder bit 1 = processed has been set.
      public Builder clear() {
        super.clear();
        if (regionActionResultBuilder_ == null) {
          regionActionResult_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          regionActionResultBuilder_.clear();
        }
        processed_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
31636 
      // Deep copy via buildPartial(); the two builders share no mutable state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor;
      }
31645 
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
      }

      // Like buildPartial() but rejects messages whose nested required fields
      // are missing.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
31657 
      // Transfers builder state into a new message without an initialization
      // check. Builder bit 1 (0x2, processed set) maps to message bit 0 (0x1).
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (regionActionResultBuilder_ == null) {
          // Seal the list and clear the mutability bit so later builder edits
          // copy-on-write instead of mutating the built message.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.regionActionResult_ = regionActionResult_;
        } else {
          result.regionActionResult_ = regionActionResultBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000001;
        }
        result.processed_ = processed_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
31679 
      // Generic merge: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: appends other's repeated elements and overwrites
      // processed if set on other.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this;
        if (regionActionResultBuilder_ == null) {
          if (!other.regionActionResult_.isEmpty()) {
            if (regionActionResult_.isEmpty()) {
              // Adopt other's immutable list directly (copy-on-write later).
              regionActionResult_ = other.regionActionResult_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureRegionActionResultIsMutable();
              regionActionResult_.addAll(other.regionActionResult_);
            }
            onChanged();
          }
        } else {
          if (!other.regionActionResult_.isEmpty()) {
            if (regionActionResultBuilder_.isEmpty()) {
              // Replace the empty field builder with other's list; re-create
              // the builder only when the runtime forces builder mode.
              regionActionResultBuilder_.dispose();
              regionActionResultBuilder_ = null;
              regionActionResult_ = other.regionActionResult_;
              bitField0_ = (bitField0_ & ~0x00000001);
              regionActionResultBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getRegionActionResultFieldBuilder() : null;
            } else {
              regionActionResultBuilder_.addAllMessages(other.regionActionResult_);
            }
          }
        }
        if (other.hasProcessed()) {
          setProcessed(other.getProcessed());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
31723 
      // Unmemoized check (builder state is mutable): every nested
      // RegionActionResult must itself be initialized.
      public final boolean isInitialized() {
        for (int i = 0; i < getRegionActionResultCount(); i++) {
          if (!getRegionActionResult(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
31733 
      // Parses from a stream and merges the result into this builder. On a
      // parse error the partially-parsed message (attached to the exception)
      // is still merged in the finally block before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder bits: bit 0 = regionActionResult_ is a private mutable list;
      // bit 1 = processed has been explicitly set.
      private int bitField0_;

      // repeated .RegionActionResult regionActionResult = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_ =
        java.util.Collections.emptyList();
      // Copy-on-write: clone the (possibly shared/immutable) list before the
      // first in-place mutation.
      private void ensureRegionActionResultIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>(regionActionResult_);
          bitField0_ |= 0x00000001;
         }
      }

      // Non-null once nested-builder mode is active; then it, not
      // regionActionResult_, is the source of truth for the repeated field.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> regionActionResultBuilder_;
31765 
      // Read accessors route through the field builder when it exists,
      // otherwise through the plain list.
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
        if (regionActionResultBuilder_ == null) {
          return java.util.Collections.unmodifiableList(regionActionResult_);
        } else {
          return regionActionResultBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public int getRegionActionResultCount() {
        if (regionActionResultBuilder_ == null) {
          return regionActionResult_.size();
        } else {
          return regionActionResultBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
        if (regionActionResultBuilder_ == null) {
          return regionActionResult_.get(index);
        } else {
          return regionActionResultBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder setRegionActionResult(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
        if (regionActionResultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionActionResultIsMutable();
          regionActionResult_.set(index, value);
          onChanged();
        } else {
          regionActionResultBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder setRegionActionResult(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
        if (regionActionResultBuilder_ == null) {
          ensureRegionActionResultIsMutable();
          regionActionResult_.set(index, builderForValue.build());
          onChanged();
        } else {
          regionActionResultBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder addRegionActionResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
        if (regionActionResultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionActionResultIsMutable();
          regionActionResult_.add(value);
          onChanged();
        } else {
          regionActionResultBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder addRegionActionResult(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
        if (regionActionResultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionActionResultIsMutable();
          regionActionResult_.add(index, value);
          onChanged();
        } else {
          regionActionResultBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder addRegionActionResult(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
        if (regionActionResultBuilder_ == null) {
          ensureRegionActionResultIsMutable();
          regionActionResult_.add(builderForValue.build());
          onChanged();
        } else {
          regionActionResultBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder addRegionActionResult(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
        if (regionActionResultBuilder_ == null) {
          ensureRegionActionResultIsMutable();
          regionActionResult_.add(index, builderForValue.build());
          onChanged();
        } else {
          regionActionResultBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder addAllRegionActionResult(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> values) {
        if (regionActionResultBuilder_ == null) {
          ensureRegionActionResultIsMutable();
          // GeneratedMessage.Builder helper: bulk-append with null checks.
          super.addAll(values, regionActionResult_);
          onChanged();
        } else {
          regionActionResultBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder clearRegionActionResult() {
        if (regionActionResultBuilder_ == null) {
          regionActionResult_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          regionActionResultBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       */
      public Builder removeRegionActionResult(int index) {
        if (regionActionResultBuilder_ == null) {
          ensureRegionActionResultIsMutable();
          regionActionResult_.remove(index);
          onChanged();
        } else {
          regionActionResultBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       *
       * Returns a mutable builder for the element at {@code index}. Note this
       * forces the lazy field builder into existence (switching the field to
       * builder-backed mode).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getRegionActionResultBuilder(
          int index) {
        return getRegionActionResultFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       *
       * Read-only view of the element at {@code index}: the message itself in
       * list mode, or a message-or-builder from the field builder otherwise.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
          int index) {
        if (regionActionResultBuilder_ == null) {
          return regionActionResult_.get(index);  } else {
          return regionActionResultBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       *
       * Returns the whole field as an unmodifiable read-only list; the field
       * builder supplies its own message-or-builder list when active.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
           getRegionActionResultOrBuilderList() {
        if (regionActionResultBuilder_ != null) {
          return regionActionResultBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(regionActionResult_);
        }
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       *
       * Appends a new element initialized from the default instance and
       * returns its builder for in-place population.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder() {
        return getRegionActionResultFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       *
       * Inserts a new default-initialized element at {@code index} and
       * returns its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder(
          int index) {
        return getRegionActionResultFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
      }
      /**
       * <code>repeated .RegionActionResult regionActionResult = 1;</code>
       *
       * Returns builders for every element; forces the lazy field builder
       * into existence.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder> 
           getRegionActionResultBuilderList() {
        return getRegionActionResultFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder for regionActionResult.
      // After creation the plain list reference is nulled out: the builder
      // becomes the single source of truth for this field. The boolean
      // argument tells the builder whether the list was privately owned
      // (bit 0x00000001 of bitField0_).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> 
          getRegionActionResultFieldBuilder() {
        if (regionActionResultBuilder_ == null) {
          regionActionResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>(
                  regionActionResult_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          regionActionResult_ = null;
        }
        return regionActionResultBuilder_;
      }
31992 
      // optional bool processed = 2;
      // Backing value; presence is tracked by bit 0x00000002 of bitField0_.
      private boolean processed_ ;
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       *
       * @return true if the field was explicitly set on this builder.
       */
      public boolean hasProcessed() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       *
       * @return the current value (false when unset).
       */
      public boolean getProcessed() {
        return processed_;
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       *
       * Sets the value, marks the has-bit, and notifies listeners.
       */
      public Builder setProcessed(boolean value) {
        bitField0_ |= 0x00000002;
        processed_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       *
       * Resets the field to its default (false) and clears the has-bit.
       */
      public Builder clearProcessed() {
        bitField0_ = (bitField0_ & ~0x00000002);
        processed_ = false;
        onChanged();
        return this;
      }
32041 
32042       // @@protoc_insertion_point(builder_scope:MultiResponse)
32043     }
32044 
    static {
      // Eagerly construct the shared default instance (the 'true' flag selects
      // the no-op constructor) and initialize its fields to defaults.
      defaultInstance = new MultiResponse(true);
      defaultInstance.initFields();
    }
32049 
32050     // @@protoc_insertion_point(class_scope:MultiResponse)
32051   }
32052 
32053   /**
32054    * Protobuf service {@code ClientService}
32055    */
32056   public static abstract class ClientService
32057       implements com.google.protobuf.Service {
32058     protected ClientService() {}
32059 
    /**
     * Callback-style contract for ClientService: one asynchronous method per
     * RPC declared in Client.proto (Get, Mutate, Scan, BulkLoadHFile,
     * ExecService, ExecRegionServerService, Multi). Each method delivers its
     * response through the supplied {@code done} callback.
     */
    public interface Interface {
      /**
       * <code>rpc Get(.GetRequest) returns (.GetResponse);</code>
       */
      public abstract void get(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);

      /**
       * <code>rpc Mutate(.MutateRequest) returns (.MutateResponse);</code>
       */
      public abstract void mutate(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);

      /**
       * <code>rpc Scan(.ScanRequest) returns (.ScanResponse);</code>
       */
      public abstract void scan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);

      /**
       * <code>rpc BulkLoadHFile(.BulkLoadHFileRequest) returns (.BulkLoadHFileResponse);</code>
       */
      public abstract void bulkLoadHFile(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);

      /**
       * <code>rpc ExecService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
       */
      public abstract void execService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);

      /**
       * <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
       */
      public abstract void execRegionServerService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);

      /**
       * <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
       */
      public abstract void multi(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);

    }
32118 
    /**
     * Wraps an {@link Interface} implementation as a
     * {@link com.google.protobuf.Service}; each override forwards
     * controller/request/done verbatim to {@code impl}.
     */
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new ClientService() {
        @java.lang.Override
        public  void get(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
          impl.get(controller, request, done);
        }

        @java.lang.Override
        public  void mutate(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
          impl.mutate(controller, request, done);
        }

        @java.lang.Override
        public  void scan(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
          impl.scan(controller, request, done);
        }

        @java.lang.Override
        public  void bulkLoadHFile(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
          impl.bulkLoadHFile(controller, request, done);
        }

        @java.lang.Override
        public  void execService(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
          impl.execService(controller, request, done);
        }

        @java.lang.Override
        public  void execRegionServerService(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
          impl.execRegionServerService(controller, request, done);
        }

        @java.lang.Override
        public  void multi(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
          impl.multi(controller, request, done);
        }

      };
    }
32180 
    /**
     * Adapts a {@link BlockingInterface} implementation to the generic
     * {@link com.google.protobuf.BlockingService} API. Dispatch is by the
     * method's index within this service's descriptor:
     * 0=Get, 1=Mutate, 2=Scan, 3=BulkLoadHFile, 4=ExecService,
     * 5=ExecRegionServerService, 6=Multi. Note indices 4 and 5 share the
     * CoprocessorServiceRequest/Response message types.
     */
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          // Reject descriptors that belong to a different service.
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request);
            case 1:
              return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request);
            case 2:
              return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request);
            case 3:
              return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request);
            case 4:
              return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
            case 5:
              return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
            case 6:
              return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
            default:
              // Unreachable: the descriptor check above guarantees a known index.
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Default (empty) request message for the given method, used by RPC
        // frameworks to parse incoming request bytes.
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
            case 4:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
            case 5:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
            case 6:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Default (empty) response message for the given method.
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
            case 4:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
            case 5:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
            case 6:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
32277 
    /**
     * <code>rpc Get(.GetRequest) returns (.GetResponse);</code>
     *
     * The response is delivered through the {@code done} callback.
     */
    public abstract void get(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
32285 
    /**
     * <code>rpc Mutate(.MutateRequest) returns (.MutateResponse);</code>
     *
     * The response is delivered through the {@code done} callback.
     */
    public abstract void mutate(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
32293 
    /**
     * <code>rpc Scan(.ScanRequest) returns (.ScanResponse);</code>
     *
     * The response is delivered through the {@code done} callback.
     */
    public abstract void scan(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
32301 
    /**
     * <code>rpc BulkLoadHFile(.BulkLoadHFileRequest) returns (.BulkLoadHFileResponse);</code>
     *
     * The response is delivered through the {@code done} callback.
     */
    public abstract void bulkLoadHFile(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
32309 
    /**
     * <code>rpc ExecService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
     *
     * The response is delivered through the {@code done} callback.
     */
    public abstract void execService(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
32317 
    /**
     * <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
     *
     * Same request/response message types as ExecService; the response is
     * delivered through the {@code done} callback.
     */
    public abstract void execRegionServerService(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
32325 
    /**
     * <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
     *
     * The response is delivered through the {@code done} callback.
     */
    public abstract void multi(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
32333 
    /**
     * Returns this service's descriptor: the first service (index 0)
     * declared in Client.proto's file descriptor.
     */
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0);
    }
32339     public final com.google.protobuf.Descriptors.ServiceDescriptor
32340         getDescriptorForType() {
32341       return getDescriptor();
32342     }
32343 
    /**
     * Generic asynchronous dispatch: routes {@code request} to the abstract
     * method selected by the descriptor's index (0=Get ... 6=Multi), after
     * narrowing the generic {@code done} callback to the method's concrete
     * response type.
     */
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      // Reject descriptors that belong to a different service.
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse>specializeCallback(
              done));
          return;
        case 1:
          this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse>specializeCallback(
              done));
          return;
        case 2:
          this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse>specializeCallback(
              done));
          return;
        case 3:
          this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse>specializeCallback(
              done));
          return;
        case 4:
          this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
              done));
          return;
        case 5:
          this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
              done));
          return;
        case 6:
          this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
              done));
          return;
        default:
          // Unreachable: the descriptor check above guarantees a known index.
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
32395 
    /**
     * Returns the default (empty) request message for {@code method}, used
     * by RPC frameworks to parse incoming request bytes. Indices 4 and 5
     * both map to CoprocessorServiceRequest.
     */
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
        case 4:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
        case 5:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
        case 6:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
32423 
    /**
     * Returns the default (empty) response message for {@code method}.
     * Indices 4 and 5 both map to CoprocessorServiceResponse.
     */
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
        case 4:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
        case 5:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
        case 6:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
32451 
32452     public static Stub newStub(
32453         com.google.protobuf.RpcChannel channel) {
32454       return new Stub(channel);
32455     }
32456 
32457     public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface {
      // Private: instances are obtained via ClientService.newStub(channel).
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
32461 
      // Transport used for every call issued by this stub.
      private final com.google.protobuf.RpcChannel channel;

      /** Returns the channel this stub sends its calls over. */
      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }
32467 
      // Issues the Get RPC (method index 0) over the channel; the generic
      // channel callback is adapted back to a typed GetResponse callback.
      public  void get(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()));
      }
32482 
      // Issues the Mutate RPC (method index 1) over the channel.
      public  void mutate(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()));
      }
32497 
      // Issues the Scan RPC (method index 2) over the channel.
      public  void scan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()));
      }
32512 
      // Issues the BulkLoadHFile RPC (method index 3) over the channel.
      public  void bulkLoadHFile(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()));
      }
32527 
      /**
       * Asynchronous {@code ExecService} RPC (method index 4 of the
       * ClientService descriptor): sends the CoprocessorServiceRequest over
       * this stub's channel and hands the CoprocessorServiceResponse to
       * {@code done}.
       */
      public  void execService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
      }
32542 
      /**
       * Asynchronous {@code ExecRegionServerService} RPC (method index 5 of
       * the ClientService descriptor).  Same request/response message types as
       * {@link #execService}, but routed to a different service method.
       */
      public  void execRegionServerService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
      }
32557 
      /**
       * Asynchronous {@code Multi} RPC (method index 6 of the ClientService
       * descriptor): sends the MultiRequest over this stub's channel and hands
       * the MultiResponse to {@code done}.
       */
      public  void multi(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()));
      }
32572     }
32573 
    /**
     * Creates a blocking (synchronous) client stub for ClientService that
     * issues each call through the supplied
     * {@link com.google.protobuf.BlockingRpcChannel}.
     */
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
32578 
    /**
     * Blocking (synchronous) client-side view of the ClientService RPCs.
     * Each method waits for the server's reply and throws
     * {@link com.google.protobuf.ServiceException} if the call fails.
     * Instances are obtained from {@code newBlockingStub(channel)}.
     */
    public interface BlockingInterface {
      /** Blocking form of the {@code Get} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking form of the {@code Mutate} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking form of the {@code Scan} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking form of the {@code BulkLoadHFile} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking form of the {@code ExecService} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking form of the {@code ExecRegionServerService} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException;

      /** Blocking form of the {@code Multi} RPC. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
          throws com.google.protobuf.ServiceException;
    }
32615 
    /**
     * {@link BlockingInterface} implementation that dispatches each RPC
     * through {@code channel.callBlockingMethod}.  The integer passed to
     * {@code getMethods().get(i)} is the method's position in the
     * ClientService descriptor (Get=0, Mutate=1, Scan=2, BulkLoadHFile=3,
     * ExecService=4, ExecRegionServerService=5, Multi=6), and the cast of the
     * returned message mirrors that method's declared response type.
     */
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      // Transport all calls are issued on; supplied by newBlockingStub.
      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
      }

    }
32707 
32708     // @@protoc_insertion_point(class_scope:ClientService)
32709   }
32710 
  // Descriptor and reflective field-accessor-table caches — one pair per
  // message type declared in Client.proto.  Each is assigned exactly once by
  // this class's static initializer after the serialized file descriptor has
  // been parsed; generated message classes read them for reflection support.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Authorizations_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Authorizations_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CellVisibility_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CellVisibility_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Column_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Column_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Get_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Get_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Result_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Result_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Condition_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Condition_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutationProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutationProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutationProto_ColumnValue_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutationProto_ColumnValue_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutateRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutateRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutateResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutateResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Scan_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Scan_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ScanRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ScanRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ScanResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ScanResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadHFileRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadHFileRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadHFileResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadHFileResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceCall_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceCall_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceResult_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceResult_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Action_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Action_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionAction_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionAction_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionLoadStats_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionLoadStats_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ResultOrException_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ResultOrException_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionActionResult_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionActionResult_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MultiRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MultiRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MultiResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MultiResponse_fieldAccessorTable;
32861 
  /**
   * Returns the file descriptor for {@code Client.proto}, which describes
   * every message, enum, and service generated in this class.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned once by the static initializer when the serialized descriptor
  // data is parsed.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
32868   static {
32869     java.lang.String[] descriptorData = {
32870       "\n\014Client.proto\032\013HBase.proto\032\014Filter.prot" +
32871       "o\032\nCell.proto\032\020Comparator.proto\"\037\n\016Autho" +
32872       "rizations\022\r\n\005label\030\001 \003(\t\"$\n\016CellVisibili" +
32873       "ty\022\022\n\nexpression\030\001 \002(\t\"+\n\006Column\022\016\n\006fami" +
32874       "ly\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"\324\002\n\003Get\022\013\n\003r" +
32875       "ow\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007.Column\022!\n\tatt" +
32876       "ribute\030\003 \003(\0132\016.NameBytesPair\022\027\n\006filter\030\004" +
32877       " \001(\0132\007.Filter\022\036\n\ntime_range\030\005 \001(\0132\n.Time" +
32878       "Range\022\027\n\014max_versions\030\006 \001(\r:\0011\022\032\n\014cache_" +
32879       "blocks\030\007 \001(\010:\004true\022\023\n\013store_limit\030\010 \001(\r\022",
32880       "\024\n\014store_offset\030\t \001(\r\022\035\n\016existence_only\030" +
32881       "\n \001(\010:\005false\022!\n\022closest_row_before\030\013 \001(\010" +
32882       ":\005false\022)\n\013consistency\030\014 \001(\0162\014.Consisten" +
32883       "cy:\006STRONG\"z\n\006Result\022\023\n\004cell\030\001 \003(\0132\005.Cel" +
32884       "l\022\035\n\025associated_cell_count\030\002 \001(\005\022\016\n\006exis" +
32885       "ts\030\003 \001(\010\022\024\n\005stale\030\004 \001(\010:\005false\022\026\n\007partia" +
32886       "l\030\005 \001(\010:\005false\"A\n\nGetRequest\022 \n\006region\030\001" +
32887       " \002(\0132\020.RegionSpecifier\022\021\n\003get\030\002 \002(\0132\004.Ge" +
32888       "t\"&\n\013GetResponse\022\027\n\006result\030\001 \001(\0132\007.Resul" +
32889       "t\"\200\001\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002",
32890       " \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\"\n\014compare_type\030" +
32891       "\004 \002(\0162\014.CompareType\022\037\n\ncomparator\030\005 \002(\0132" +
32892       "\013.Comparator\"\265\006\n\rMutationProto\022\013\n\003row\030\001 " +
32893       "\001(\014\0220\n\013mutate_type\030\002 \001(\0162\033.MutationProto" +
32894       ".MutationType\0220\n\014column_value\030\003 \003(\0132\032.Mu" +
32895       "tationProto.ColumnValue\022\021\n\ttimestamp\030\004 \001" +
32896       "(\004\022!\n\tattribute\030\005 \003(\0132\016.NameBytesPair\022:\n" +
32897       "\ndurability\030\006 \001(\0162\031.MutationProto.Durabi" +
32898       "lity:\013USE_DEFAULT\022\036\n\ntime_range\030\007 \001(\0132\n." +
32899       "TimeRange\022\035\n\025associated_cell_count\030\010 \001(\005",
32900       "\022\r\n\005nonce\030\t \001(\004\032\347\001\n\013ColumnValue\022\016\n\006famil" +
32901       "y\030\001 \002(\014\022B\n\017qualifier_value\030\002 \003(\0132).Mutat" +
32902       "ionProto.ColumnValue.QualifierValue\032\203\001\n\016" +
32903       "QualifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005val" +
32904       "ue\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022.\n\013delete_ty" +
32905       "pe\030\004 \001(\0162\031.MutationProto.DeleteType\022\014\n\004t" +
32906       "ags\030\005 \001(\014\"W\n\nDurability\022\017\n\013USE_DEFAULT\020\000" +
32907       "\022\014\n\010SKIP_WAL\020\001\022\r\n\tASYNC_WAL\020\002\022\014\n\010SYNC_WA" +
32908       "L\020\003\022\r\n\tFSYNC_WAL\020\004\">\n\014MutationType\022\n\n\006AP" +
32909       "PEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE",
32910       "\020\003\"p\n\nDeleteType\022\026\n\022DELETE_ONE_VERSION\020\000" +
32911       "\022\034\n\030DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rDELETE" +
32912       "_FAMILY\020\002\022\031\n\025DELETE_FAMILY_VERSION\020\003\"\207\001\n" +
32913       "\rMutateRequest\022 \n\006region\030\001 \002(\0132\020.RegionS" +
32914       "pecifier\022 \n\010mutation\030\002 \002(\0132\016.MutationPro" +
32915       "to\022\035\n\tcondition\030\003 \001(\0132\n.Condition\022\023\n\013non" +
32916       "ce_group\030\004 \001(\004\"<\n\016MutateResponse\022\027\n\006resu" +
32917       "lt\030\001 \001(\0132\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\271\003\n" +
32918       "\004Scan\022\027\n\006column\030\001 \003(\0132\007.Column\022!\n\tattrib" +
32919       "ute\030\002 \003(\0132\016.NameBytesPair\022\021\n\tstart_row\030\003",
32920       " \001(\014\022\020\n\010stop_row\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007" +
32921       ".Filter\022\036\n\ntime_range\030\006 \001(\0132\n.TimeRange\022" +
32922       "\027\n\014max_versions\030\007 \001(\r:\0011\022\032\n\014cache_blocks" +
32923       "\030\010 \001(\010:\004true\022\022\n\nbatch_size\030\t \001(\r\022\027\n\017max_" +
32924       "result_size\030\n \001(\004\022\023\n\013store_limit\030\013 \001(\r\022\024" +
32925       "\n\014store_offset\030\014 \001(\r\022&\n\036load_column_fami" +
32926       "lies_on_demand\030\r \001(\010\022\r\n\005small\030\016 \001(\010\022\027\n\010r" +
32927       "eversed\030\017 \001(\010:\005false\022)\n\013consistency\030\020 \001(" +
32928       "\0162\014.Consistency:\006STRONG\022\017\n\007caching\030\021 \001(\r" +
32929       "\"\342\001\n\013ScanRequest\022 \n\006region\030\001 \001(\0132\020.Regio",
32930       "nSpecifier\022\023\n\004scan\030\002 \001(\0132\005.Scan\022\022\n\nscann" +
32931       "er_id\030\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r\022\025\n\rc" +
32932       "lose_scanner\030\005 \001(\010\022\025\n\rnext_call_seq\030\006 \001(" +
32933       "\004\022\037\n\027client_handles_partials\030\007 \001(\010\022!\n\031cl" +
32934       "ient_handles_heartbeats\030\010 \001(\010\"\344\001\n\014ScanRe" +
32935       "sponse\022\030\n\020cells_per_result\030\001 \003(\r\022\022\n\nscan" +
32936       "ner_id\030\002 \001(\004\022\024\n\014more_results\030\003 \001(\010\022\013\n\003tt" +
32937       "l\030\004 \001(\r\022\030\n\007results\030\005 \003(\0132\007.Result\022\r\n\005sta" +
32938       "le\030\006 \001(\010\022\037\n\027partial_flag_per_result\030\007 \003(" +
32939       "\010\022\036\n\026more_results_in_region\030\010 \001(\010\022\031\n\021hea",
32940       "rtbeat_message\030\t \001(\010\"\263\001\n\024BulkLoadHFileRe" +
32941       "quest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022" +
32942       "5\n\013family_path\030\002 \003(\0132 .BulkLoadHFileRequ" +
32943       "est.FamilyPath\022\026\n\016assign_seq_num\030\003 \001(\010\032*" +
32944       "\n\nFamilyPath\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002" +
32945       "(\t\"\'\n\025BulkLoadHFileResponse\022\016\n\006loaded\030\001 " +
32946       "\002(\010\"a\n\026CoprocessorServiceCall\022\013\n\003row\030\001 \002" +
32947       "(\014\022\024\n\014service_name\030\002 \002(\t\022\023\n\013method_name\030" +
32948       "\003 \002(\t\022\017\n\007request\030\004 \002(\014\"9\n\030CoprocessorSer" +
32949       "viceResult\022\035\n\005value\030\001 \001(\0132\016.NameBytesPai",
32950       "r\"d\n\031CoprocessorServiceRequest\022 \n\006region" +
32951       "\030\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 \002(\0132\027" +
32952       ".CoprocessorServiceCall\"]\n\032CoprocessorSe" +
32953       "rviceResponse\022 \n\006region\030\001 \002(\0132\020.RegionSp" +
32954       "ecifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesPair\"{" +
32955       "\n\006Action\022\r\n\005index\030\001 \001(\r\022 \n\010mutation\030\002 \001(" +
32956       "\0132\016.MutationProto\022\021\n\003get\030\003 \001(\0132\004.Get\022-\n\014" +
32957       "service_call\030\004 \001(\0132\027.CoprocessorServiceC" +
32958       "all\"Y\n\014RegionAction\022 \n\006region\030\001 \002(\0132\020.Re" +
32959       "gionSpecifier\022\016\n\006atomic\030\002 \001(\010\022\027\n\006action\030",
32960       "\003 \003(\0132\007.Action\"D\n\017RegionLoadStats\022\027\n\014mem" +
32961       "storeLoad\030\001 \001(\005:\0010\022\030\n\rheapOccupancy\030\002 \001(" +
32962       "\005:\0010\"\266\001\n\021ResultOrException\022\r\n\005index\030\001 \001(" +
32963       "\r\022\027\n\006result\030\002 \001(\0132\007.Result\022!\n\texception\030" +
32964       "\003 \001(\0132\016.NameBytesPair\0221\n\016service_result\030" +
32965       "\004 \001(\0132\031.CoprocessorServiceResult\022#\n\tload" +
32966       "Stats\030\005 \001(\0132\020.RegionLoadStats\"f\n\022RegionA" +
32967       "ctionResult\022-\n\021resultOrException\030\001 \003(\0132\022" +
32968       ".ResultOrException\022!\n\texception\030\002 \001(\0132\016." +
32969       "NameBytesPair\"f\n\014MultiRequest\022#\n\014regionA",
32970       "ction\030\001 \003(\0132\r.RegionAction\022\022\n\nnonceGroup" +
32971       "\030\002 \001(\004\022\035\n\tcondition\030\003 \001(\0132\n.Condition\"S\n" +
32972       "\rMultiResponse\022/\n\022regionActionResult\030\001 \003" +
32973       "(\0132\023.RegionActionResult\022\021\n\tprocessed\030\002 \001" +
32974       "(\010*\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELIN" +
32975       "E\020\0012\205\003\n\rClientService\022 \n\003Get\022\013.GetReques" +
32976       "t\032\014.GetResponse\022)\n\006Mutate\022\016.MutateReques" +
32977       "t\032\017.MutateResponse\022#\n\004Scan\022\014.ScanRequest" +
32978       "\032\r.ScanResponse\022>\n\rBulkLoadHFile\022\025.BulkL" +
32979       "oadHFileRequest\032\026.BulkLoadHFileResponse\022",
32980       "F\n\013ExecService\022\032.CoprocessorServiceReque" +
32981       "st\032\033.CoprocessorServiceResponse\022R\n\027ExecR" +
32982       "egionServerService\022\032.CoprocessorServiceR" +
32983       "equest\032\033.CoprocessorServiceResponse\022&\n\005M" +
32984       "ulti\022\r.MultiRequest\032\016.MultiResponseBB\n*o" +
32985       "rg.apache.hadoop.hbase.protobuf.generate" +
32986       "dB\014ClientProtosH\001\210\001\001\240\001\001"
32987     };
32988     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
32989       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
32990         public com.google.protobuf.ExtensionRegistry assignDescriptors(
32991             com.google.protobuf.Descriptors.FileDescriptor root) {
32992           descriptor = root;
32993           internal_static_Authorizations_descriptor =
32994             getDescriptor().getMessageTypes().get(0);
32995           internal_static_Authorizations_fieldAccessorTable = new
32996             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
32997               internal_static_Authorizations_descriptor,
32998               new java.lang.String[] { "Label", });
32999           internal_static_CellVisibility_descriptor =
33000             getDescriptor().getMessageTypes().get(1);
33001           internal_static_CellVisibility_fieldAccessorTable = new
33002             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33003               internal_static_CellVisibility_descriptor,
33004               new java.lang.String[] { "Expression", });
33005           internal_static_Column_descriptor =
33006             getDescriptor().getMessageTypes().get(2);
33007           internal_static_Column_fieldAccessorTable = new
33008             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33009               internal_static_Column_descriptor,
33010               new java.lang.String[] { "Family", "Qualifier", });
33011           internal_static_Get_descriptor =
33012             getDescriptor().getMessageTypes().get(3);
33013           internal_static_Get_fieldAccessorTable = new
33014             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33015               internal_static_Get_descriptor,
33016               new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "ClosestRowBefore", "Consistency", });
33017           internal_static_Result_descriptor =
33018             getDescriptor().getMessageTypes().get(4);
33019           internal_static_Result_fieldAccessorTable = new
33020             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33021               internal_static_Result_descriptor,
33022               new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", });
33023           internal_static_GetRequest_descriptor =
33024             getDescriptor().getMessageTypes().get(5);
33025           internal_static_GetRequest_fieldAccessorTable = new
33026             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33027               internal_static_GetRequest_descriptor,
33028               new java.lang.String[] { "Region", "Get", });
33029           internal_static_GetResponse_descriptor =
33030             getDescriptor().getMessageTypes().get(6);
33031           internal_static_GetResponse_fieldAccessorTable = new
33032             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33033               internal_static_GetResponse_descriptor,
33034               new java.lang.String[] { "Result", });
33035           internal_static_Condition_descriptor =
33036             getDescriptor().getMessageTypes().get(7);
33037           internal_static_Condition_fieldAccessorTable = new
33038             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33039               internal_static_Condition_descriptor,
33040               new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", });
33041           internal_static_MutationProto_descriptor =
33042             getDescriptor().getMessageTypes().get(8);
33043           internal_static_MutationProto_fieldAccessorTable = new
33044             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33045               internal_static_MutationProto_descriptor,
33046               new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", });
33047           internal_static_MutationProto_ColumnValue_descriptor =
33048             internal_static_MutationProto_descriptor.getNestedTypes().get(0);
33049           internal_static_MutationProto_ColumnValue_fieldAccessorTable = new
33050             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33051               internal_static_MutationProto_ColumnValue_descriptor,
33052               new java.lang.String[] { "Family", "QualifierValue", });
33053           internal_static_MutationProto_ColumnValue_QualifierValue_descriptor =
33054             internal_static_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0);
33055           internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new
33056             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33057               internal_static_MutationProto_ColumnValue_QualifierValue_descriptor,
33058               new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", });
33059           internal_static_MutateRequest_descriptor =
33060             getDescriptor().getMessageTypes().get(9);
33061           internal_static_MutateRequest_fieldAccessorTable = new
33062             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33063               internal_static_MutateRequest_descriptor,
33064               new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", });
33065           internal_static_MutateResponse_descriptor =
33066             getDescriptor().getMessageTypes().get(10);
33067           internal_static_MutateResponse_fieldAccessorTable = new
33068             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33069               internal_static_MutateResponse_descriptor,
33070               new java.lang.String[] { "Result", "Processed", });
33071           internal_static_Scan_descriptor =
33072             getDescriptor().getMessageTypes().get(11);
33073           internal_static_Scan_fieldAccessorTable = new
33074             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33075               internal_static_Scan_descriptor,
33076               new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", });
33077           internal_static_ScanRequest_descriptor =
33078             getDescriptor().getMessageTypes().get(12);
33079           internal_static_ScanRequest_fieldAccessorTable = new
33080             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33081               internal_static_ScanRequest_descriptor,
33082               new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", });
33083           internal_static_ScanResponse_descriptor =
33084             getDescriptor().getMessageTypes().get(13);
33085           internal_static_ScanResponse_fieldAccessorTable = new
33086             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33087               internal_static_ScanResponse_descriptor,
33088               new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", });
33089           internal_static_BulkLoadHFileRequest_descriptor =
33090             getDescriptor().getMessageTypes().get(14);
33091           internal_static_BulkLoadHFileRequest_fieldAccessorTable = new
33092             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33093               internal_static_BulkLoadHFileRequest_descriptor,
33094               new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", });
33095           internal_static_BulkLoadHFileRequest_FamilyPath_descriptor =
33096             internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0);
33097           internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new
33098             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33099               internal_static_BulkLoadHFileRequest_FamilyPath_descriptor,
33100               new java.lang.String[] { "Family", "Path", });
33101           internal_static_BulkLoadHFileResponse_descriptor =
33102             getDescriptor().getMessageTypes().get(15);
33103           internal_static_BulkLoadHFileResponse_fieldAccessorTable = new
33104             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33105               internal_static_BulkLoadHFileResponse_descriptor,
33106               new java.lang.String[] { "Loaded", });
33107           internal_static_CoprocessorServiceCall_descriptor =
33108             getDescriptor().getMessageTypes().get(16);
33109           internal_static_CoprocessorServiceCall_fieldAccessorTable = new
33110             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33111               internal_static_CoprocessorServiceCall_descriptor,
33112               new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", });
33113           internal_static_CoprocessorServiceResult_descriptor =
33114             getDescriptor().getMessageTypes().get(17);
33115           internal_static_CoprocessorServiceResult_fieldAccessorTable = new
33116             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33117               internal_static_CoprocessorServiceResult_descriptor,
33118               new java.lang.String[] { "Value", });
33119           internal_static_CoprocessorServiceRequest_descriptor =
33120             getDescriptor().getMessageTypes().get(18);
33121           internal_static_CoprocessorServiceRequest_fieldAccessorTable = new
33122             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33123               internal_static_CoprocessorServiceRequest_descriptor,
33124               new java.lang.String[] { "Region", "Call", });
33125           internal_static_CoprocessorServiceResponse_descriptor =
33126             getDescriptor().getMessageTypes().get(19);
33127           internal_static_CoprocessorServiceResponse_fieldAccessorTable = new
33128             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33129               internal_static_CoprocessorServiceResponse_descriptor,
33130               new java.lang.String[] { "Region", "Value", });
33131           internal_static_Action_descriptor =
33132             getDescriptor().getMessageTypes().get(20);
33133           internal_static_Action_fieldAccessorTable = new
33134             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33135               internal_static_Action_descriptor,
33136               new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", });
33137           internal_static_RegionAction_descriptor =
33138             getDescriptor().getMessageTypes().get(21);
33139           internal_static_RegionAction_fieldAccessorTable = new
33140             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33141               internal_static_RegionAction_descriptor,
33142               new java.lang.String[] { "Region", "Atomic", "Action", });
33143           internal_static_RegionLoadStats_descriptor =
33144             getDescriptor().getMessageTypes().get(22);
33145           internal_static_RegionLoadStats_fieldAccessorTable = new
33146             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33147               internal_static_RegionLoadStats_descriptor,
33148               new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", });
33149           internal_static_ResultOrException_descriptor =
33150             getDescriptor().getMessageTypes().get(23);
33151           internal_static_ResultOrException_fieldAccessorTable = new
33152             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33153               internal_static_ResultOrException_descriptor,
33154               new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", });
33155           internal_static_RegionActionResult_descriptor =
33156             getDescriptor().getMessageTypes().get(24);
33157           internal_static_RegionActionResult_fieldAccessorTable = new
33158             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33159               internal_static_RegionActionResult_descriptor,
33160               new java.lang.String[] { "ResultOrException", "Exception", });
33161           internal_static_MultiRequest_descriptor =
33162             getDescriptor().getMessageTypes().get(25);
33163           internal_static_MultiRequest_fieldAccessorTable = new
33164             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33165               internal_static_MultiRequest_descriptor,
33166               new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", });
33167           internal_static_MultiResponse_descriptor =
33168             getDescriptor().getMessageTypes().get(26);
33169           internal_static_MultiResponse_fieldAccessorTable = new
33170             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
33171               internal_static_MultiResponse_descriptor,
33172               new java.lang.String[] { "RegionActionResult", "Processed", });
33173           return null;
33174         }
33175       };
33176     com.google.protobuf.Descriptors.FileDescriptor
33177       .internalBuildGeneratedFileFrom(descriptorData,
33178         new com.google.protobuf.Descriptors.FileDescriptor[] {
33179           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
33180           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.getDescriptor(),
33181           org.apache.hadoop.hbase.protobuf.generated.CellProtos.getDescriptor(),
33182           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(),
33183         }, assigner);
33184   }
33185 
33186   // @@protoc_insertion_point(outer_class_scope)
33187 }